From 2a38656933bca5585d643dc0760f06ccbffd6f0e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 25 Jan 2023 10:56:48 -0500 Subject: [PATCH 1/6] chore: Update gapic-generator-python to v1.8.2 (#151) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.8.2 PiperOrigin-RevId: 504289125 Source-Link: https://github.com/googleapis/googleapis/commit/38a48a44a44279e9cf9f2f864b588958a2d87491 Source-Link: https://github.com/googleapis/googleapis-gen/commit/b2dc22663dbe47a972c8d8c2f8a4df013dafdcbc Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjJkYzIyNjYzZGJlNDdhOTcyYzhkOGMyZjhhNGRmMDEzZGFmZGNiYyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .coveragerc | 1 + google/cloud/deploy_v1/__init__.py | 2 +- .../snippet_metadata_google.cloud.deploy.v1.json | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.coveragerc b/.coveragerc index ae78507..57f7b8e 100644 --- a/.coveragerc +++ b/.coveragerc @@ -5,6 +5,7 @@ branch = True show_missing = True omit = google/cloud/deploy/__init__.py + google/cloud/deploy/gapic_version.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER diff --git a/google/cloud/deploy_v1/__init__.py b/google/cloud/deploy_v1/__init__.py index c2a598b..cbbf952 100644 --- a/google/cloud/deploy_v1/__init__.py +++ b/google/cloud/deploy_v1/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.cloud.deploy import gapic_version as package_version +from google.cloud.deploy_v1 import gapic_version as package_version __version__ = package_version.__version__ diff --git a/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json b/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json index b5d5cda..bbadce0 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-deploy", - "version": "1.6.1" + "version": "0.1.0" }, "snippets": [ { From d14bc8422382bfa69c67e474e09b8693aa1e63a1 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 30 Jan 2023 16:48:37 +0000 Subject: [PATCH 2/6] chore: fix prerelease_deps nox session [autoapprove] (#152) Source-Link: https://togithub.com/googleapis/synthtool/commit/26c7505b2f76981ec1707b851e1595c8c06e90fc Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f946c75373c2b0040e8e318c5e85d0cf46bc6e61d0a01f3ef94d8de974ac6790 --- .github/.OwlBot.lock.yaml | 2 +- noxfile.py | 14 ++++++-------- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 889f77d..f0f3b24 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:c43f1d918bcf817d337aa29ff833439494a158a0831508fda4ec75dc4c0d0320 + digest: sha256:f946c75373c2b0040e8e318c5e85d0cf46bc6e61d0a01f3ef94d8de974ac6790 diff --git a/noxfile.py b/noxfile.py index e716318..95e58c5 100644 --- a/noxfile.py +++ b/noxfile.py @@ -189,9 +189,9 @@ def unit(session): def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. 
- # Exclude version 1.49.0rc1 which has a known issue. - # See https://github.com/grpc/grpc/pull/30642 - session.install("--pre", "grpcio!=1.49.0rc1") + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) @@ -346,9 +346,7 @@ def prerelease_deps(session): unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES session.install(*unit_deps_all) system_deps_all = ( - SYSTEM_TEST_STANDARD_DEPENDENCIES - + SYSTEM_TEST_EXTERNAL_DEPENDENCIES - + SYSTEM_TEST_EXTRAS + SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES ) session.install(*system_deps_all) @@ -378,8 +376,8 @@ def prerelease_deps(session): # dependency of grpc "six", "googleapis-common-protos", - # Exclude version 1.49.0rc1 which has a known issue. See https://github.com/grpc/grpc/pull/30642 - "grpcio!=1.49.0rc1", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", "grpcio-status", "google-api-core", "proto-plus", From 6476d7a0c71e026762638653562f68e5162c4fe3 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 8 Feb 2023 07:07:44 -0500 Subject: [PATCH 3/6] chore: Update gapic-generator-python to v1.8.4 (#153) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.8.4 PiperOrigin-RevId: 507808936 Source-Link: https://github.com/googleapis/googleapis/commit/64cf8492b21778ce62c66ecee81b468a293bfd4c Source-Link: https://github.com/googleapis/googleapis-gen/commit/53c48cac153d3b37f3d2c2dec4830cfd91ec4153 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTNjNDhjYWMxNTNkM2IzN2YzZDJjMmRlYzQ4MzBjZmQ5MWVjNDE1MyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- setup.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/setup.py b/setup.py index 97a9a13..1a8ba75 100644 --- a/setup.py +++ b/setup.py @@ -56,9 +56,7 @@ if package.startswith("google") ] -namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") +namespaces = ["google", "google.cloud"] setuptools.setup( name=name, From 6a9155650001972e2e5c587a8d4b27011da7ce1b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 8 Feb 2023 15:12:12 +0000 Subject: [PATCH 4/6] build(deps): bump cryptography from 38.0.3 to 39.0.1 in /synthtool/gcp/templates/python_library/.kokoro (#154) Source-Link: https://togithub.com/googleapis/synthtool/commit/bb171351c3946d3c3c32e60f5f18cee8c464ec51 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f62c53736eccb0c4934a3ea9316e0d57696bb49c1a7c86c726e9bb8a2f87dadf --- 
.github/.OwlBot.lock.yaml | 2 +- .kokoro/requirements.txt | 49 ++++++++++++++++++--------------------- 2 files changed, 23 insertions(+), 28 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index f0f3b24..894fb6b 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f946c75373c2b0040e8e318c5e85d0cf46bc6e61d0a01f3ef94d8de974ac6790 + digest: sha256:f62c53736eccb0c4934a3ea9316e0d57696bb49c1a7c86c726e9bb8a2f87dadf diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 05dc467..096e480 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -113,33 +113,28 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==38.0.3 \ - --hash=sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d \ - --hash=sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd \ - --hash=sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146 \ - --hash=sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7 \ - --hash=sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436 \ - --hash=sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0 \ - --hash=sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828 \ - --hash=sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b \ - --hash=sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55 \ - --hash=sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36 \ - --hash=sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50 \ - --hash=sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2 \ - 
--hash=sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a \ - --hash=sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8 \ - --hash=sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0 \ - --hash=sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548 \ - --hash=sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320 \ - --hash=sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748 \ - --hash=sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249 \ - --hash=sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959 \ - --hash=sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f \ - --hash=sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0 \ - --hash=sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd \ - --hash=sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220 \ - --hash=sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c \ - --hash=sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722 +cryptography==39.0.1 \ + --hash=sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4 \ + --hash=sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f \ + --hash=sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502 \ + --hash=sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41 \ + --hash=sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965 \ + --hash=sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e \ + --hash=sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc \ + --hash=sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad \ + --hash=sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505 \ + 
--hash=sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388 \ + --hash=sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6 \ + --hash=sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2 \ + --hash=sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac \ + --hash=sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695 \ + --hash=sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6 \ + --hash=sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336 \ + --hash=sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0 \ + --hash=sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c \ + --hash=sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106 \ + --hash=sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a \ + --hash=sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8 # via # gcp-releasetool # secretstorage From 3d6d5fe5c742361a9b00c4826e98d1d450743931 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 16 Feb 2023 15:36:47 -0500 Subject: [PATCH 5/6] feat: enable "rest" transport in Python for services supporting numeric enums (#155) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: enable "rest" transport in Python for services supporting numeric enums PiperOrigin-RevId: 508143576 Source-Link: https://github.com/googleapis/googleapis/commit/7a702a989db3b413f39ff8994ca53fb38b6928c2 Source-Link: https://github.com/googleapis/googleapis-gen/commit/6ad1279c0e7aa787ac6b66c9fd4a210692edffcd Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNmFkMTI3OWMwZTdhYTc4N2FjNmI2NmM5ZmQ0YTIxMDY5MmVkZmZjZCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: 
Owl Bot --- google/cloud/deploy_v1/gapic_metadata.json | 115 + .../deploy_v1/services/cloud_deploy/client.py | 2 + .../cloud_deploy/transports/__init__.py | 4 + .../services/cloud_deploy/transports/rest.py | 3977 ++++++++ .../unit/gapic/deploy_v1/test_cloud_deploy.py | 8673 ++++++++++++++++- 5 files changed, 12349 insertions(+), 422 deletions(-) create mode 100644 google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py diff --git a/google/cloud/deploy_v1/gapic_metadata.json b/google/cloud/deploy_v1/gapic_metadata.json index 92f86fe..37de069 100644 --- a/google/cloud/deploy_v1/gapic_metadata.json +++ b/google/cloud/deploy_v1/gapic_metadata.json @@ -236,6 +236,121 @@ ] } } + }, + "rest": { + "libraryClient": "CloudDeployClient", + "rpcs": { + "AbandonRelease": { + "methods": [ + "abandon_release" + ] + }, + "ApproveRollout": { + "methods": [ + "approve_rollout" + ] + }, + "CreateDeliveryPipeline": { + "methods": [ + "create_delivery_pipeline" + ] + }, + "CreateRelease": { + "methods": [ + "create_release" + ] + }, + "CreateRollout": { + "methods": [ + "create_rollout" + ] + }, + "CreateTarget": { + "methods": [ + "create_target" + ] + }, + "DeleteDeliveryPipeline": { + "methods": [ + "delete_delivery_pipeline" + ] + }, + "DeleteTarget": { + "methods": [ + "delete_target" + ] + }, + "GetConfig": { + "methods": [ + "get_config" + ] + }, + "GetDeliveryPipeline": { + "methods": [ + "get_delivery_pipeline" + ] + }, + "GetJobRun": { + "methods": [ + "get_job_run" + ] + }, + "GetRelease": { + "methods": [ + "get_release" + ] + }, + "GetRollout": { + "methods": [ + "get_rollout" + ] + }, + "GetTarget": { + "methods": [ + "get_target" + ] + }, + "ListDeliveryPipelines": { + "methods": [ + "list_delivery_pipelines" + ] + }, + "ListJobRuns": { + "methods": [ + "list_job_runs" + ] + }, + "ListReleases": { + "methods": [ + "list_releases" + ] + }, + "ListRollouts": { + "methods": [ + "list_rollouts" + ] + }, + "ListTargets": { + "methods": [ + "list_targets" + ] + }, + 
"RetryJob": { + "methods": [ + "retry_job" + ] + }, + "UpdateDeliveryPipeline": { + "methods": [ + "update_delivery_pipeline" + ] + }, + "UpdateTarget": { + "methods": [ + "update_target" + ] + } + } } } } diff --git a/google/cloud/deploy_v1/services/cloud_deploy/client.py b/google/cloud/deploy_v1/services/cloud_deploy/client.py index ca8bcb4..62ea3f0 100644 --- a/google/cloud/deploy_v1/services/cloud_deploy/client.py +++ b/google/cloud/deploy_v1/services/cloud_deploy/client.py @@ -62,6 +62,7 @@ from .transports.base import DEFAULT_CLIENT_INFO, CloudDeployTransport from .transports.grpc import CloudDeployGrpcTransport from .transports.grpc_asyncio import CloudDeployGrpcAsyncIOTransport +from .transports.rest import CloudDeployRestTransport class CloudDeployClientMeta(type): @@ -75,6 +76,7 @@ class CloudDeployClientMeta(type): _transport_registry = OrderedDict() # type: Dict[str, Type[CloudDeployTransport]] _transport_registry["grpc"] = CloudDeployGrpcTransport _transport_registry["grpc_asyncio"] = CloudDeployGrpcAsyncIOTransport + _transport_registry["rest"] = CloudDeployRestTransport def get_transport_class( cls, diff --git a/google/cloud/deploy_v1/services/cloud_deploy/transports/__init__.py b/google/cloud/deploy_v1/services/cloud_deploy/transports/__init__.py index bc60ea0..9ba4086 100644 --- a/google/cloud/deploy_v1/services/cloud_deploy/transports/__init__.py +++ b/google/cloud/deploy_v1/services/cloud_deploy/transports/__init__.py @@ -19,14 +19,18 @@ from .base import CloudDeployTransport from .grpc import CloudDeployGrpcTransport from .grpc_asyncio import CloudDeployGrpcAsyncIOTransport +from .rest import CloudDeployRestInterceptor, CloudDeployRestTransport # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[CloudDeployTransport]] _transport_registry["grpc"] = CloudDeployGrpcTransport _transport_registry["grpc_asyncio"] = CloudDeployGrpcAsyncIOTransport +_transport_registry["rest"] = CloudDeployRestTransport __all__ = ( "CloudDeployTransport", "CloudDeployGrpcTransport", "CloudDeployGrpcAsyncIOTransport", + "CloudDeployRestTransport", + "CloudDeployRestInterceptor", ) diff --git a/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py b/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py new file mode 100644 index 0000000..3056c17 --- /dev/null +++ b/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py @@ -0,0 +1,3977 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.deploy_v1.types import cloud_deploy + +from .base import CloudDeployTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class CloudDeployRestInterceptor: + """Interceptor for CloudDeploy. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the CloudDeployRestTransport. 
+ + .. code-block:: python + class MyCustomCloudDeployInterceptor(CloudDeployRestInterceptor): + def pre_abandon_release(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_abandon_release(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_approve_rollout(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_approve_rollout(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_delivery_pipeline(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_delivery_pipeline(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_release(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_release(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_rollout(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_rollout(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_target(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_target(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_delivery_pipeline(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_delivery_pipeline(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_target(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_target(self, response): + logging.log(f"Received response: 
{response}") + return response + + def pre_get_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_delivery_pipeline(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_delivery_pipeline(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_job_run(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_job_run(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_release(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_release(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_rollout(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_rollout(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_target(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_target(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_delivery_pipelines(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_delivery_pipelines(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_job_runs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_job_runs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_releases(self, request, metadata): + logging.log(f"Received 
request: {request}") + return request, metadata + + def post_list_releases(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_rollouts(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_rollouts(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_targets(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_targets(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_retry_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_retry_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_delivery_pipeline(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_delivery_pipeline(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_target(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_target(self, response): + logging.log(f"Received response: {response}") + return response + + transport = CloudDeployRestTransport(interceptor=MyCustomCloudDeployInterceptor()) + client = CloudDeployClient(transport=transport) + + + """ + + def pre_abandon_release( + self, + request: cloud_deploy.AbandonReleaseRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.AbandonReleaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for abandon_release + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. 
+ """ + return request, metadata + + def post_abandon_release( + self, response: cloud_deploy.AbandonReleaseResponse + ) -> cloud_deploy.AbandonReleaseResponse: + """Post-rpc interceptor for abandon_release + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + + def pre_approve_rollout( + self, + request: cloud_deploy.ApproveRolloutRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.ApproveRolloutRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for approve_rollout + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_approve_rollout( + self, response: cloud_deploy.ApproveRolloutResponse + ) -> cloud_deploy.ApproveRolloutResponse: + """Post-rpc interceptor for approve_rollout + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + + def pre_create_delivery_pipeline( + self, + request: cloud_deploy.CreateDeliveryPipelineRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.CreateDeliveryPipelineRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_delivery_pipeline + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_create_delivery_pipeline( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_delivery_pipeline + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. 
+ """ + return response + + def pre_create_release( + self, + request: cloud_deploy.CreateReleaseRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.CreateReleaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_release + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_create_release( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_release + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + + def pre_create_rollout( + self, + request: cloud_deploy.CreateRolloutRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.CreateRolloutRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_rollout + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_create_rollout( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_rollout + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + + def pre_create_target( + self, + request: cloud_deploy.CreateTargetRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.CreateTargetRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_target + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. 
+ """ + return request, metadata + + def post_create_target( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_target + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + + def pre_delete_delivery_pipeline( + self, + request: cloud_deploy.DeleteDeliveryPipelineRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.DeleteDeliveryPipelineRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_delivery_pipeline + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_delete_delivery_pipeline( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_delivery_pipeline + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + + def pre_delete_target( + self, + request: cloud_deploy.DeleteTargetRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.DeleteTargetRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_target + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_delete_target( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_target + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. 
+ """ + return response + + def pre_get_config( + self, + request: cloud_deploy.GetConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.GetConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_get_config(self, response: cloud_deploy.Config) -> cloud_deploy.Config: + """Post-rpc interceptor for get_config + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + + def pre_get_delivery_pipeline( + self, + request: cloud_deploy.GetDeliveryPipelineRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.GetDeliveryPipelineRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_delivery_pipeline + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_get_delivery_pipeline( + self, response: cloud_deploy.DeliveryPipeline + ) -> cloud_deploy.DeliveryPipeline: + """Post-rpc interceptor for get_delivery_pipeline + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + + def pre_get_job_run( + self, + request: cloud_deploy.GetJobRunRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.GetJobRunRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_job_run + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. 
+ """ + return request, metadata + + def post_get_job_run(self, response: cloud_deploy.JobRun) -> cloud_deploy.JobRun: + """Post-rpc interceptor for get_job_run + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + + def pre_get_release( + self, + request: cloud_deploy.GetReleaseRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.GetReleaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_release + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_get_release(self, response: cloud_deploy.Release) -> cloud_deploy.Release: + """Post-rpc interceptor for get_release + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + + def pre_get_rollout( + self, + request: cloud_deploy.GetRolloutRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.GetRolloutRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_rollout + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_get_rollout(self, response: cloud_deploy.Rollout) -> cloud_deploy.Rollout: + """Post-rpc interceptor for get_rollout + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. 
+ """ + return response + + def pre_get_target( + self, + request: cloud_deploy.GetTargetRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.GetTargetRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_target + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_get_target(self, response: cloud_deploy.Target) -> cloud_deploy.Target: + """Post-rpc interceptor for get_target + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + + def pre_list_delivery_pipelines( + self, + request: cloud_deploy.ListDeliveryPipelinesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.ListDeliveryPipelinesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_delivery_pipelines + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_list_delivery_pipelines( + self, response: cloud_deploy.ListDeliveryPipelinesResponse + ) -> cloud_deploy.ListDeliveryPipelinesResponse: + """Post-rpc interceptor for list_delivery_pipelines + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + + def pre_list_job_runs( + self, + request: cloud_deploy.ListJobRunsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.ListJobRunsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_job_runs + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. 
+ """ + return request, metadata + + def post_list_job_runs( + self, response: cloud_deploy.ListJobRunsResponse + ) -> cloud_deploy.ListJobRunsResponse: + """Post-rpc interceptor for list_job_runs + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + + def pre_list_releases( + self, + request: cloud_deploy.ListReleasesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.ListReleasesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_releases + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_list_releases( + self, response: cloud_deploy.ListReleasesResponse + ) -> cloud_deploy.ListReleasesResponse: + """Post-rpc interceptor for list_releases + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + + def pre_list_rollouts( + self, + request: cloud_deploy.ListRolloutsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.ListRolloutsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_rollouts + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_list_rollouts( + self, response: cloud_deploy.ListRolloutsResponse + ) -> cloud_deploy.ListRolloutsResponse: + """Post-rpc interceptor for list_rollouts + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. 
+ """ + return response + + def pre_list_targets( + self, + request: cloud_deploy.ListTargetsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.ListTargetsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_targets + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_list_targets( + self, response: cloud_deploy.ListTargetsResponse + ) -> cloud_deploy.ListTargetsResponse: + """Post-rpc interceptor for list_targets + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + + def pre_retry_job( + self, request: cloud_deploy.RetryJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloud_deploy.RetryJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for retry_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_retry_job( + self, response: cloud_deploy.RetryJobResponse + ) -> cloud_deploy.RetryJobResponse: + """Post-rpc interceptor for retry_job + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + + def pre_update_delivery_pipeline( + self, + request: cloud_deploy.UpdateDeliveryPipelineRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.UpdateDeliveryPipelineRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_delivery_pipeline + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. 
+ """ + return request, metadata + + def post_update_delivery_pipeline( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_delivery_pipeline + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + + def pre_update_target( + self, + request: cloud_deploy.UpdateTargetRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_deploy.UpdateTargetRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_target + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_update_target( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_target + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> locations_pb2.Location: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.GetLocationRequest + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. 
+ """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> locations_pb2.ListLocationsResponse: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsRequest + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> policy_pb2.Policy: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_get_iam_policy( + self, response: iam_policy_pb2.GetIamPolicyRequest + ) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> policy_pb2.Policy: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. 
+ """ + return request, metadata + + def post_set_iam_policy( + self, response: iam_policy_pb2.SetIamPolicyRequest + ) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsRequest + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> None: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_cancel_operation( + self, response: operations_pb2.CancelOperationRequest + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. 
+ """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> None: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_delete_operation( + self, response: operations_pb2.DeleteOperationRequest + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> operations_pb2.Operation: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.GetOperationRequest + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> operations_pb2.ListOperationsResponse: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudDeploy server. 
+ """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsRequest + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class CloudDeployRestStub: + _session: AuthorizedSession + _host: str + _interceptor: CloudDeployRestInterceptor + + +class CloudDeployRestTransport(CloudDeployTransport): + """REST backend transport for CloudDeploy. + + CloudDeploy service creates and manages Continuous Delivery + operations on Google Cloud Platform via Skaffold + (https://skaffold.dev). + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "clouddeploy.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[CloudDeployRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or CloudDeployRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _AbandonRelease(CloudDeployRestStub): + def __hash__(self): + return hash("AbandonRelease") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.AbandonReleaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.AbandonReleaseResponse: + r"""Call the abandon release method over HTTP. + + Args: + request (~.cloud_deploy.AbandonReleaseRequest): + The request object. The request object used by ``AbandonRelease``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_deploy.AbandonReleaseResponse: + The response object for ``AbandonRelease``. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/deliveryPipelines/*/releases/*}:abandon", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_abandon_release(request, metadata) + pb_request = cloud_deploy.AbandonReleaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_deploy.AbandonReleaseResponse() + pb_resp = cloud_deploy.AbandonReleaseResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_abandon_release(resp) + return resp + + class _ApproveRollout(CloudDeployRestStub): + def __hash__(self): + return hash("ApproveRollout") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.ApproveRolloutRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.ApproveRolloutResponse: + r"""Call the approve rollout method over HTTP. + + Args: + request (~.cloud_deploy.ApproveRolloutRequest): + The request object. The request object used by ``ApproveRollout``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_deploy.ApproveRolloutResponse: + The response object from ``ApproveRollout``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/deliveryPipelines/*/releases/*/rollouts/*}:approve", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_approve_rollout(request, metadata) + pb_request = cloud_deploy.ApproveRolloutRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_deploy.ApproveRolloutResponse() + pb_resp = cloud_deploy.ApproveRolloutResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_approve_rollout(resp) + return resp + + class _CreateDeliveryPipeline(CloudDeployRestStub): + def __hash__(self): + return hash("CreateDeliveryPipeline") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + "deliveryPipelineId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.CreateDeliveryPipelineRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create delivery pipeline method over HTTP. + + Args: + request (~.cloud_deploy.CreateDeliveryPipelineRequest): + The request object. The request object for ``CreateDeliveryPipeline``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/deliveryPipelines", + "body": "delivery_pipeline", + }, + ] + request, metadata = self._interceptor.pre_create_delivery_pipeline( + request, metadata + ) + pb_request = cloud_deploy.CreateDeliveryPipelineRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_delivery_pipeline(resp) + return resp + + class _CreateRelease(CloudDeployRestStub): + def __hash__(self): + return hash("CreateRelease") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + "releaseId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.CreateReleaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create release method over HTTP. + + Args: + request (~.cloud_deploy.CreateReleaseRequest): + The request object. The request object for ``CreateRelease``, + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/deliveryPipelines/*}/releases", + "body": "release", + }, + ] + request, metadata = self._interceptor.pre_create_release(request, metadata) + pb_request = cloud_deploy.CreateReleaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_release(resp) + return resp + + class _CreateRollout(CloudDeployRestStub): + def __hash__(self): + return hash("CreateRollout") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + "rolloutId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.CreateRolloutRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create rollout method over HTTP. + + Args: + request (~.cloud_deploy.CreateRolloutRequest): + The request object. CreateRolloutRequest is the request object used by + ``CreateRollout``. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/deliveryPipelines/*/releases/*}/rollouts", + "body": "rollout", + }, + ] + request, metadata = self._interceptor.pre_create_rollout(request, metadata) + pb_request = cloud_deploy.CreateRolloutRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_rollout(resp) + return resp + + class _CreateTarget(CloudDeployRestStub): + def __hash__(self): + return hash("CreateTarget") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + "targetId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.CreateTargetRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create target method over HTTP. + + Args: + request (~.cloud_deploy.CreateTargetRequest): + The request object. The request object for ``CreateTarget``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/targets", + "body": "target", + }, + ] + request, metadata = self._interceptor.pre_create_target(request, metadata) + pb_request = cloud_deploy.CreateTargetRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_target(resp) + return resp + + class _DeleteDeliveryPipeline(CloudDeployRestStub): + def __hash__(self): + return hash("DeleteDeliveryPipeline") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.DeleteDeliveryPipelineRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete delivery pipeline method over HTTP. + + Args: + request (~.cloud_deploy.DeleteDeliveryPipelineRequest): + The request object. The request object for ``DeleteDeliveryPipeline``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/deliveryPipelines/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_delivery_pipeline( + request, metadata + ) + pb_request = cloud_deploy.DeleteDeliveryPipelineRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_delivery_pipeline(resp) + return resp + + class _DeleteTarget(CloudDeployRestStub): + def __hash__(self): + return hash("DeleteTarget") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.DeleteTargetRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete target method over HTTP. + + Args: + request (~.cloud_deploy.DeleteTargetRequest): + The request object. The request object for ``DeleteTarget``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/targets/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_target(request, metadata) + pb_request = cloud_deploy.DeleteTargetRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_target(resp) + return resp + + class _GetConfig(CloudDeployRestStub): + def __hash__(self): + return hash("GetConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.GetConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.Config: + r"""Call the get config method over HTTP. + + Args: + request (~.cloud_deploy.GetConfigRequest): + The request object. Request to get a configuration. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_deploy.Config: + Service-wide configuration. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/config}", + }, + ] + request, metadata = self._interceptor.pre_get_config(request, metadata) + pb_request = cloud_deploy.GetConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_deploy.Config() + pb_resp = cloud_deploy.Config.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_config(resp) + return resp + + class _GetDeliveryPipeline(CloudDeployRestStub): + def __hash__(self): + return hash("GetDeliveryPipeline") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.GetDeliveryPipelineRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.DeliveryPipeline: + r"""Call the get delivery pipeline method over HTTP. + + Args: + request (~.cloud_deploy.GetDeliveryPipelineRequest): + The request object. The request object for ``GetDeliveryPipeline`` + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_deploy.DeliveryPipeline: + A ``DeliveryPipeline`` resource in the Google Cloud + Deploy API. + + A ``DeliveryPipeline`` defines a pipeline through which + a Skaffold configuration can progress. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/deliveryPipelines/*}", + }, + ] + request, metadata = self._interceptor.pre_get_delivery_pipeline( + request, metadata + ) + pb_request = cloud_deploy.GetDeliveryPipelineRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_deploy.DeliveryPipeline() + pb_resp = cloud_deploy.DeliveryPipeline.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_delivery_pipeline(resp) + return resp + + class _GetJobRun(CloudDeployRestStub): + def __hash__(self): + return hash("GetJobRun") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.GetJobRunRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.JobRun: + r"""Call the get job run method over HTTP. + + Args: + request (~.cloud_deploy.GetJobRunRequest): + The request object. GetJobRunRequest is the request object used by + ``GetJobRun``. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_deploy.JobRun: + A ``JobRun`` resource in the Google Cloud Deploy API. + + A ``JobRun`` contains information of a single + ``Rollout`` job evaluation. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/deliveryPipelines/*/releases/*/rollouts/*/jobRuns/*}", + }, + ] + request, metadata = self._interceptor.pre_get_job_run(request, metadata) + pb_request = cloud_deploy.GetJobRunRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_deploy.JobRun() + pb_resp = cloud_deploy.JobRun.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_job_run(resp) + return resp + + class _GetRelease(CloudDeployRestStub): + def __hash__(self): + return hash("GetRelease") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.GetReleaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.Release: + r"""Call the get release method over HTTP. + + Args: + request (~.cloud_deploy.GetReleaseRequest): + The request object. The request object for ``GetRelease``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_deploy.Release: + A ``Release`` resource in the Google Cloud Deploy API. + + A ``Release`` defines a specific Skaffold configuration + instance that can be deployed. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/deliveryPipelines/*/releases/*}", + }, + ] + request, metadata = self._interceptor.pre_get_release(request, metadata) + pb_request = cloud_deploy.GetReleaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_deploy.Release() + pb_resp = cloud_deploy.Release.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_release(resp) + return resp + + class _GetRollout(CloudDeployRestStub): + def __hash__(self): + return hash("GetRollout") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.GetRolloutRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.Rollout: + r"""Call the get rollout method over HTTP. + + Args: + request (~.cloud_deploy.GetRolloutRequest): + The request object. GetRolloutRequest is the request object used by + ``GetRollout``. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_deploy.Rollout: + A ``Rollout`` resource in the Google Cloud Deploy API. + + A ``Rollout`` contains information around a specific + deployment to a ``Target``. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/deliveryPipelines/*/releases/*/rollouts/*}", + }, + ] + request, metadata = self._interceptor.pre_get_rollout(request, metadata) + pb_request = cloud_deploy.GetRolloutRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_deploy.Rollout() + pb_resp = cloud_deploy.Rollout.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_rollout(resp) + return resp + + class _GetTarget(CloudDeployRestStub): + def __hash__(self): + return hash("GetTarget") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.GetTargetRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.Target: + r"""Call the get target method over HTTP. + + Args: + request (~.cloud_deploy.GetTargetRequest): + The request object. The request object for ``GetTarget``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_deploy.Target: + A ``Target`` resource in the Google Cloud Deploy API. + + A ``Target`` defines a location to which a Skaffold + configuration can be deployed. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/targets/*}", + }, + ] + request, metadata = self._interceptor.pre_get_target(request, metadata) + pb_request = cloud_deploy.GetTargetRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_deploy.Target() + pb_resp = cloud_deploy.Target.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_target(resp) + return resp + + class _ListDeliveryPipelines(CloudDeployRestStub): + def __hash__(self): + return hash("ListDeliveryPipelines") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.ListDeliveryPipelinesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.ListDeliveryPipelinesResponse: + r"""Call the list delivery pipelines method over HTTP. + + Args: + request (~.cloud_deploy.ListDeliveryPipelinesRequest): + The request object. The request object for ``ListDeliveryPipelines``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_deploy.ListDeliveryPipelinesResponse: + The response object from ``ListDeliveryPipelines``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/deliveryPipelines", + }, + ] + request, metadata = self._interceptor.pre_list_delivery_pipelines( + request, metadata + ) + pb_request = cloud_deploy.ListDeliveryPipelinesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_deploy.ListDeliveryPipelinesResponse() + pb_resp = cloud_deploy.ListDeliveryPipelinesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_delivery_pipelines(resp) + return resp + + class _ListJobRuns(CloudDeployRestStub): + def __hash__(self): + return hash("ListJobRuns") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.ListJobRunsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.ListJobRunsResponse: + r"""Call the list job runs method over HTTP. + + Args: + request (~.cloud_deploy.ListJobRunsRequest): + The request object. ListJobRunsRequest is the request object used by + ``ListJobRuns``. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_deploy.ListJobRunsResponse: + ListJobRunsResponse is the response object returned by + ``ListJobRuns``. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/deliveryPipelines/*/releases/*/rollouts/*}/jobRuns", + }, + ] + request, metadata = self._interceptor.pre_list_job_runs(request, metadata) + pb_request = cloud_deploy.ListJobRunsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_deploy.ListJobRunsResponse() + pb_resp = cloud_deploy.ListJobRunsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_job_runs(resp) + return resp + + class _ListReleases(CloudDeployRestStub): + def __hash__(self): + return hash("ListReleases") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.ListReleasesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.ListReleasesResponse: + r"""Call the list releases method over HTTP. + + Args: + request (~.cloud_deploy.ListReleasesRequest): + The request object. The request object for ``ListReleases``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_deploy.ListReleasesResponse: + The response object from ``ListReleases``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/deliveryPipelines/*}/releases", + }, + ] + request, metadata = self._interceptor.pre_list_releases(request, metadata) + pb_request = cloud_deploy.ListReleasesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = cloud_deploy.ListReleasesResponse()
+            pb_resp = cloud_deploy.ListReleasesResponse.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_list_releases(resp)
+            return resp
+
+    class _ListRollouts(CloudDeployRestStub):
+        def __hash__(self):
+            return hash("ListRollouts")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: cloud_deploy.ListRolloutsRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> cloud_deploy.ListRolloutsResponse:
+            r"""Call the list rollouts method over HTTP.
+
+            Args:
+                request (~.cloud_deploy.ListRolloutsRequest):
+                    The request object. ListRolloutsRequest is the request object used by
+                    ``ListRollouts``.
+
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.cloud_deploy.ListRolloutsResponse:
+                    ListRolloutsResponse is the response object returned by
+                    ``ListRollouts``.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/deliveryPipelines/*/releases/*}/rollouts", + }, + ] + request, metadata = self._interceptor.pre_list_rollouts(request, metadata) + pb_request = cloud_deploy.ListRolloutsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_deploy.ListRolloutsResponse() + pb_resp = cloud_deploy.ListRolloutsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_rollouts(resp) + return resp + + class _ListTargets(CloudDeployRestStub): + def __hash__(self): + return hash("ListTargets") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.ListTargetsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.ListTargetsResponse: + r"""Call the list targets method over HTTP. + + Args: + request (~.cloud_deploy.ListTargetsRequest): + The request object. The request object for ``ListTargets``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_deploy.ListTargetsResponse: + The response object from ``ListTargets``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/targets", + }, + ] + request, metadata = self._interceptor.pre_list_targets(request, metadata) + pb_request = cloud_deploy.ListTargetsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_deploy.ListTargetsResponse() + pb_resp = cloud_deploy.ListTargetsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_targets(resp) + return resp + + class _RetryJob(CloudDeployRestStub): + def __hash__(self): + return hash("RetryJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.RetryJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_deploy.RetryJobResponse: + r"""Call the retry job method over HTTP. + + Args: + request (~.cloud_deploy.RetryJobRequest): + The request object. RetryJobRequest is the request object used by + ``RetryJob``. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_deploy.RetryJobResponse: + The response object from 'RetryJob'. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{rollout=projects/*/locations/*/deliveryPipelines/*/releases/*/rollouts/*}:retryJob", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_retry_job(request, metadata) + pb_request = cloud_deploy.RetryJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_deploy.RetryJobResponse() + pb_resp = cloud_deploy.RetryJobResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_retry_job(resp) + return resp + + class _UpdateDeliveryPipeline(CloudDeployRestStub): + def __hash__(self): + return hash("UpdateDeliveryPipeline") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.UpdateDeliveryPipelineRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update delivery pipeline method over HTTP. + + Args: + request (~.cloud_deploy.UpdateDeliveryPipelineRequest): + The request object. The request object for ``UpdateDeliveryPipeline``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{delivery_pipeline.name=projects/*/locations/*/deliveryPipelines/*}", + "body": "delivery_pipeline", + }, + ] + request, metadata = self._interceptor.pre_update_delivery_pipeline( + request, metadata + ) + pb_request = cloud_deploy.UpdateDeliveryPipelineRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_delivery_pipeline(resp) + return resp + + class _UpdateTarget(CloudDeployRestStub): + def __hash__(self): + return hash("UpdateTarget") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_deploy.UpdateTargetRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update target method over HTTP. + + Args: + request (~.cloud_deploy.UpdateTargetRequest): + The request object. The request object for ``UpdateTarget``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{target.name=projects/*/locations/*/targets/*}", + "body": "target", + }, + ] + request, metadata = self._interceptor.pre_update_target(request, metadata) + pb_request = cloud_deploy.UpdateTargetRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_target(resp) + return resp + + @property + def abandon_release( + self, + ) -> Callable[ + [cloud_deploy.AbandonReleaseRequest], cloud_deploy.AbandonReleaseResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AbandonRelease(self._session, self._host, self._interceptor) # type: ignore + + @property + def approve_rollout( + self, + ) -> Callable[ + [cloud_deploy.ApproveRolloutRequest], cloud_deploy.ApproveRolloutResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ApproveRollout(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_delivery_pipeline( + self, + ) -> Callable[ + [cloud_deploy.CreateDeliveryPipelineRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDeliveryPipeline(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_release( + self, + ) -> Callable[[cloud_deploy.CreateReleaseRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateRelease(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_rollout( + self, + ) -> Callable[[cloud_deploy.CreateRolloutRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateRollout(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_target( + self, + ) -> Callable[[cloud_deploy.CreateTargetRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateTarget(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_delivery_pipeline( + self, + ) -> Callable[ + [cloud_deploy.DeleteDeliveryPipelineRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDeliveryPipeline(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_target( + self, + ) -> Callable[[cloud_deploy.DeleteTargetRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteTarget(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_config( + self, + ) -> Callable[[cloud_deploy.GetConfigRequest], cloud_deploy.Config]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_delivery_pipeline( + self, + ) -> Callable[ + [cloud_deploy.GetDeliveryPipelineRequest], cloud_deploy.DeliveryPipeline + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDeliveryPipeline(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_job_run( + self, + ) -> Callable[[cloud_deploy.GetJobRunRequest], cloud_deploy.JobRun]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetJobRun(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_release( + self, + ) -> Callable[[cloud_deploy.GetReleaseRequest], cloud_deploy.Release]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetRelease(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_rollout( + self, + ) -> Callable[[cloud_deploy.GetRolloutRequest], cloud_deploy.Rollout]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetRollout(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_target( + self, + ) -> Callable[[cloud_deploy.GetTargetRequest], cloud_deploy.Target]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetTarget(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_delivery_pipelines( + self, + ) -> Callable[ + [cloud_deploy.ListDeliveryPipelinesRequest], + cloud_deploy.ListDeliveryPipelinesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDeliveryPipelines(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_job_runs( + self, + ) -> Callable[[cloud_deploy.ListJobRunsRequest], cloud_deploy.ListJobRunsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListJobRuns(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_releases( + self, + ) -> Callable[ + [cloud_deploy.ListReleasesRequest], cloud_deploy.ListReleasesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListReleases(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_rollouts( + self, + ) -> Callable[ + [cloud_deploy.ListRolloutsRequest], cloud_deploy.ListRolloutsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListRollouts(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_targets( + self, + ) -> Callable[[cloud_deploy.ListTargetsRequest], cloud_deploy.ListTargetsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTargets(self._session, self._host, self._interceptor) # type: ignore + + @property + def retry_job( + self, + ) -> Callable[[cloud_deploy.RetryJobRequest], cloud_deploy.RetryJobResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RetryJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_delivery_pipeline( + self, + ) -> Callable[ + [cloud_deploy.UpdateDeliveryPipelineRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateDeliveryPipeline(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_target( + self, + ) -> Callable[[cloud_deploy.UpdateTargetRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateTarget(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(CloudDeployRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(CloudDeployRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy(CloudDeployRestStub): + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + + r"""Call the get iam policy method over HTTP. + + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{resource=projects/*/locations/*/deliveryPipelines/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/locations/*/targets/*}:getIamPolicy", + }, + ] + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy(CloudDeployRestStub): + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + + r"""Call the set iam policy method over HTTP. + + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/deliveryPipelines/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/targets/*}:setIamPolicy", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.loads(json.dumps(transcoded_request["body"])) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions(CloudDeployRestStub): + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + + r"""Call the test iam permissions method over HTTP. 
+ + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/deliveryPipelines/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/targets/*}:testIamPermissions", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.loads(json.dumps(transcoded_request["body"])) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(CloudDeployRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.loads(json.dumps(transcoded_request["body"])) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(CloudDeployRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(CloudDeployRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(CloudDeployRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("CloudDeployRestTransport",) diff --git a/tests/unit/gapic/deploy_v1/test_cloud_deploy.py b/tests/unit/gapic/deploy_v1/test_cloud_deploy.py index b20bb5d..1cba652 100644 --- a/tests/unit/gapic/deploy_v1/test_cloud_deploy.py +++ b/tests/unit/gapic/deploy_v1/test_cloud_deploy.py @@ -22,6 +22,8 @@ except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math from google.api_core import ( @@ -48,12 +50,15 @@ from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session from google.cloud.deploy_v1.services.cloud_deploy import ( CloudDeployAsyncClient, @@ -110,6 +115,7 @@ def test__get_default_mtls_endpoint(): [ (CloudDeployClient, "grpc"), (CloudDeployAsyncClient, "grpc_asyncio"), + (CloudDeployClient, "rest"), ], ) def test_cloud_deploy_client_from_service_account_info(client_class, transport_name): @@ -123,7 +129,11 @@ def test_cloud_deploy_client_from_service_account_info(client_class, transport_n assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("clouddeploy.googleapis.com:443") + assert 
client.transport._host == ( + "clouddeploy.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://clouddeploy.googleapis.com" + ) @pytest.mark.parametrize( @@ -131,6 +141,7 @@ def test_cloud_deploy_client_from_service_account_info(client_class, transport_n [ (transports.CloudDeployGrpcTransport, "grpc"), (transports.CloudDeployGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.CloudDeployRestTransport, "rest"), ], ) def test_cloud_deploy_client_service_account_always_use_jwt( @@ -156,6 +167,7 @@ def test_cloud_deploy_client_service_account_always_use_jwt( [ (CloudDeployClient, "grpc"), (CloudDeployAsyncClient, "grpc_asyncio"), + (CloudDeployClient, "rest"), ], ) def test_cloud_deploy_client_from_service_account_file(client_class, transport_name): @@ -176,13 +188,18 @@ def test_cloud_deploy_client_from_service_account_file(client_class, transport_n assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("clouddeploy.googleapis.com:443") + assert client.transport._host == ( + "clouddeploy.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://clouddeploy.googleapis.com" + ) def test_cloud_deploy_client_get_transport_class(): transport = CloudDeployClient.get_transport_class() available_transports = [ transports.CloudDeployGrpcTransport, + transports.CloudDeployRestTransport, ] assert transport in available_transports @@ -199,6 +216,7 @@ def test_cloud_deploy_client_get_transport_class(): transports.CloudDeployGrpcAsyncIOTransport, "grpc_asyncio", ), + (CloudDeployClient, transports.CloudDeployRestTransport, "rest"), ], ) @mock.patch.object( @@ -342,6 +360,8 @@ def test_cloud_deploy_client_client_options( "grpc_asyncio", "false", ), + (CloudDeployClient, transports.CloudDeployRestTransport, "rest", "true"), + (CloudDeployClient, transports.CloudDeployRestTransport, "rest", "false"), ], ) @mock.patch.object( @@ -535,6 +555,7 @@ def 
test_cloud_deploy_client_get_mtls_endpoint_and_cert_source(client_class): transports.CloudDeployGrpcAsyncIOTransport, "grpc_asyncio", ), + (CloudDeployClient, transports.CloudDeployRestTransport, "rest"), ], ) def test_cloud_deploy_client_client_options_scopes( @@ -570,6 +591,7 @@ def test_cloud_deploy_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + (CloudDeployClient, transports.CloudDeployRestTransport, "rest", None), ], ) def test_cloud_deploy_client_client_options_credentials_file( @@ -7003,141 +7025,7293 @@ async def test_get_config_flattened_error_async(): ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.CloudDeployGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.ListDeliveryPipelinesRequest, + dict, + ], +) +def test_list_delivery_pipelines_rest(request_type): + client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.ListDeliveryPipelinesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.CloudDeployGrpcTransport( + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_deploy.ListDeliveryPipelinesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_delivery_pipelines(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDeliveryPipelinesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_delivery_pipelines_rest_required_fields( + request_type=cloud_deploy.ListDeliveryPipelinesRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_delivery_pipelines._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_delivery_pipelines._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.ListDeliveryPipelinesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloud_deploy.ListDeliveryPipelinesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_delivery_pipelines(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_delivery_pipelines_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - with pytest.raises(ValueError): - client = CloudDeployClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + + unset_fields = transport.list_delivery_pipelines._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) ) + & set(("parent",)) + ) - # It is an error to provide an api_key and a transport instance. 
- transport = transports.CloudDeployGrpcTransport( + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_delivery_pipelines_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CloudDeployClient( - client_options=options, - transport=transport, + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_list_delivery_pipelines" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_list_delivery_pipelines" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.ListDeliveryPipelinesRequest.pb( + cloud_deploy.ListDeliveryPipelinesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_deploy.ListDeliveryPipelinesResponse.to_json( + cloud_deploy.ListDeliveryPipelinesResponse() ) - # It is an error to provide an api_key and a credential. 
- options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CloudDeployClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() + request = cloud_deploy.ListDeliveryPipelinesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_deploy.ListDeliveryPipelinesResponse() + + client.list_delivery_pipelines( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # It is an error to provide scopes and a transport instance. - transport = transports.CloudDeployGrpcTransport( + pre.assert_called_once() + post.assert_called_once() + + +def test_list_delivery_pipelines_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.ListDeliveryPipelinesRequest +): + client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - with pytest.raises(ValueError): - client = CloudDeployClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.CloudDeployGrpcTransport( + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_delivery_pipelines(request) + + +def test_list_delivery_pipelines_rest_flattened(): + client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - client = CloudDeployClient(transport=transport) - assert client.transport is transport + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.ListDeliveryPipelinesResponse() -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.CloudDeployGrpcTransport( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_deploy.ListDeliveryPipelinesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_delivery_pipelines(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/deliveryPipelines" + % client.transport._host, + args[1], + ) + + +def test_list_delivery_pipelines_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - channel = transport.grpc_channel - assert channel - transport = transports.CloudDeployGrpcAsyncIOTransport( + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_delivery_pipelines( + cloud_deploy.ListDeliveryPipelinesRequest(), + parent="parent_value", + ) + + +def test_list_delivery_pipelines_rest_pager(transport: str = "rest"): + client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - channel = transport.grpc_channel - assert channel + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_deploy.ListDeliveryPipelinesResponse( + delivery_pipelines=[ + cloud_deploy.DeliveryPipeline(), + cloud_deploy.DeliveryPipeline(), + cloud_deploy.DeliveryPipeline(), + ], + next_page_token="abc", + ), + cloud_deploy.ListDeliveryPipelinesResponse( + delivery_pipelines=[], + next_page_token="def", + ), + cloud_deploy.ListDeliveryPipelinesResponse( + delivery_pipelines=[ + cloud_deploy.DeliveryPipeline(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListDeliveryPipelinesResponse( + delivery_pipelines=[ + cloud_deploy.DeliveryPipeline(), + cloud_deploy.DeliveryPipeline(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + cloud_deploy.ListDeliveryPipelinesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudDeployGrpcTransport, - transports.CloudDeployGrpcAsyncIOTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_delivery_pipelines(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.DeliveryPipeline) for i in results) + + pages = list(client.list_delivery_pipelines(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( - "transport_name", + "request_type", [ - "grpc", + cloud_deploy.GetDeliveryPipelineRequest, + dict, ], ) -def test_transport_kind(transport_name): - transport = CloudDeployClient.get_transport_class(transport_name)( +def test_get_delivery_pipeline_rest(request_type): + client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert transport.kind == transport_name + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloud_deploy.DeliveryPipeline( + name="name_value", + uid="uid_value", + description="description_value", + etag="etag_value", + suspended=True, + serial_pipeline=cloud_deploy.SerialPipeline( + stages=[cloud_deploy.Stage(target_id="target_id_value")] + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_deploy.DeliveryPipeline.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_delivery_pipeline(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_deploy.DeliveryPipeline) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.suspended is True + + +def test_get_delivery_pipeline_rest_required_fields( + request_type=cloud_deploy.GetDeliveryPipelineRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_delivery_pipeline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_delivery_pipeline._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.CloudDeployGrpcTransport, - ) + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.DeliveryPipeline() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + response_value = Response() + response_value.status_code = 200 -def test_cloud_deploy_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.CloudDeployTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) + pb_return_value = cloud_deploy.DeliveryPipeline.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -def test_cloud_deploy_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.deploy_v1.services.cloud_deploy.transports.CloudDeployTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.CloudDeployTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) + response = client.get_delivery_pipeline(request) - # Every method on the transport should just blindly + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_delivery_pipeline_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_delivery_pipeline._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_delivery_pipeline_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor 
+ else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_get_delivery_pipeline" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_get_delivery_pipeline" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.GetDeliveryPipelineRequest.pb( + cloud_deploy.GetDeliveryPipelineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_deploy.DeliveryPipeline.to_json( + cloud_deploy.DeliveryPipeline() + ) + + request = cloud_deploy.GetDeliveryPipelineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_deploy.DeliveryPipeline() + + client.get_delivery_pipeline( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_delivery_pipeline_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.GetDeliveryPipelineRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_delivery_pipeline(request) + + +def test_get_delivery_pipeline_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.DeliveryPipeline() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_deploy.DeliveryPipeline.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_delivery_pipeline(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*}" + % client.transport._host, + args[1], + ) + + +def test_get_delivery_pipeline_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_delivery_pipeline( + cloud_deploy.GetDeliveryPipelineRequest(), + name="name_value", + ) + + +def test_get_delivery_pipeline_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.CreateDeliveryPipelineRequest, + dict, + ], +) +def test_create_delivery_pipeline_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["delivery_pipeline"] = { + "name": "name_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "serial_pipeline": { + "stages": [ + { + "target_id": "target_id_value", + "profiles": ["profiles_value1", "profiles_value2"], + "strategy": {"standard": {"verify": True}}, + } + ] + }, + "condition": { + "pipeline_ready_condition": {"status": True, "update_time": {}}, + "targets_present_condition": { + "status": True, + "missing_targets": ["missing_targets_value1", "missing_targets_value2"], + "update_time": {}, + }, + }, + "etag": "etag_value", + "suspended": True, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a 
response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_delivery_pipeline(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_delivery_pipeline_rest_required_fields( + request_type=cloud_deploy.CreateDeliveryPipelineRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["delivery_pipeline_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "deliveryPipelineId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_delivery_pipeline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "deliveryPipelineId" in jsonified_request + assert ( + jsonified_request["deliveryPipelineId"] == request_init["delivery_pipeline_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["deliveryPipelineId"] = "delivery_pipeline_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_delivery_pipeline._get_unset_required_fields(jsonified_request) + # Check that 
path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "delivery_pipeline_id", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "deliveryPipelineId" in jsonified_request + assert jsonified_request["deliveryPipelineId"] == "delivery_pipeline_id_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_delivery_pipeline(request) + + expected_params = [ + ( + "deliveryPipelineId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_delivery_pipeline_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_delivery_pipeline._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "deliveryPipelineId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "deliveryPipelineId", + "deliveryPipeline", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_delivery_pipeline_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_create_delivery_pipeline" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_create_delivery_pipeline" + ) as pre: + 
pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.CreateDeliveryPipelineRequest.pb( + cloud_deploy.CreateDeliveryPipelineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloud_deploy.CreateDeliveryPipelineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_delivery_pipeline( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_delivery_pipeline_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.CreateDeliveryPipelineRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["delivery_pipeline"] = { + "name": "name_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "serial_pipeline": { + "stages": [ + { + "target_id": "target_id_value", + "profiles": ["profiles_value1", "profiles_value2"], + "strategy": {"standard": {"verify": True}}, + } + ] + }, + "condition": { + "pipeline_ready_condition": {"status": True, "update_time": {}}, + "targets_present_condition": { + "status": True, + "missing_targets": ["missing_targets_value1", "missing_targets_value2"], + "update_time": {}, + }, + }, + "etag": "etag_value", + "suspended": True, + } + request = request_type(**request_init) 
+ + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_delivery_pipeline(request) + + +def test_create_delivery_pipeline_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), + delivery_pipeline_id="delivery_pipeline_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_delivery_pipeline(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/deliveryPipelines" + % client.transport._host, + args[1], + ) + + +def test_create_delivery_pipeline_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_delivery_pipeline( + cloud_deploy.CreateDeliveryPipelineRequest(), + parent="parent_value", + delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), + delivery_pipeline_id="delivery_pipeline_id_value", + ) + + +def test_create_delivery_pipeline_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.UpdateDeliveryPipelineRequest, + dict, + ], +) +def test_update_delivery_pipeline_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "delivery_pipeline": { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + } + request_init["delivery_pipeline"] = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "serial_pipeline": { + "stages": [ + { + "target_id": "target_id_value", + "profiles": ["profiles_value1", "profiles_value2"], + "strategy": {"standard": {"verify": True}}, + } + ] + }, + "condition": { + "pipeline_ready_condition": {"status": True, "update_time": {}}, + "targets_present_condition": { + "status": True, + 
"missing_targets": ["missing_targets_value1", "missing_targets_value2"], + "update_time": {}, + }, + }, + "etag": "etag_value", + "suspended": True, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_delivery_pipeline(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_delivery_pipeline_rest_required_fields( + request_type=cloud_deploy.UpdateDeliveryPipelineRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_delivery_pipeline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_delivery_pipeline._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "allow_missing", + "request_id", + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_delivery_pipeline(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_delivery_pipeline_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_delivery_pipeline._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "requestId", + 
"updateMask", + "validateOnly", + ) + ) + & set( + ( + "updateMask", + "deliveryPipeline", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_delivery_pipeline_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_update_delivery_pipeline" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_update_delivery_pipeline" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.UpdateDeliveryPipelineRequest.pb( + cloud_deploy.UpdateDeliveryPipelineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloud_deploy.UpdateDeliveryPipelineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_delivery_pipeline( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_delivery_pipeline_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.UpdateDeliveryPipelineRequest +): + client = CloudDeployClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "delivery_pipeline": { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + } + request_init["delivery_pipeline"] = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "serial_pipeline": { + "stages": [ + { + "target_id": "target_id_value", + "profiles": ["profiles_value1", "profiles_value2"], + "strategy": {"standard": {"verify": True}}, + } + ] + }, + "condition": { + "pipeline_ready_condition": {"status": True, "update_time": {}}, + "targets_present_condition": { + "status": True, + "missing_targets": ["missing_targets_value1", "missing_targets_value2"], + "update_time": {}, + }, + }, + "etag": "etag_value", + "suspended": True, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_delivery_pipeline(request) + + +def test_update_delivery_pipeline_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "delivery_pipeline": { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_delivery_pipeline(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{delivery_pipeline.name=projects/*/locations/*/deliveryPipelines/*}" + % client.transport._host, + args[1], + ) + + +def test_update_delivery_pipeline_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_delivery_pipeline( + cloud_deploy.UpdateDeliveryPipelineRequest(), + delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_delivery_pipeline_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.DeleteDeliveryPipelineRequest, + dict, + ], +) +def test_delete_delivery_pipeline_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_delivery_pipeline(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_delivery_pipeline_rest_required_fields( + request_type=cloud_deploy.DeleteDeliveryPipelineRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_delivery_pipeline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_delivery_pipeline._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "etag", + "force", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_delivery_pipeline(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_delivery_pipeline_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_delivery_pipeline._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "etag", + "force", + "requestId", + "validateOnly", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_delivery_pipeline_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_delete_delivery_pipeline" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_delete_delivery_pipeline" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.DeleteDeliveryPipelineRequest.pb( + cloud_deploy.DeleteDeliveryPipelineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloud_deploy.DeleteDeliveryPipelineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_delivery_pipeline( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_delivery_pipeline_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.DeleteDeliveryPipelineRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_delivery_pipeline(request) + + +def test_delete_delivery_pipeline_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_delivery_pipeline(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_delivery_pipeline_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_delivery_pipeline( + cloud_deploy.DeleteDeliveryPipelineRequest(), + name="name_value", + ) + + +def test_delete_delivery_pipeline_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.ListTargetsRequest, + dict, + ], +) +def test_list_targets_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.ListTargetsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_deploy.ListTargetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_targets(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListTargetsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_targets_rest_required_fields( + request_type=cloud_deploy.ListTargetsRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_targets._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_targets._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.ListTargetsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloud_deploy.ListTargetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_targets(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_targets_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_targets._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_targets_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, 
mock.patch.object( + transports.CloudDeployRestInterceptor, "post_list_targets" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_list_targets" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.ListTargetsRequest.pb( + cloud_deploy.ListTargetsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_deploy.ListTargetsResponse.to_json( + cloud_deploy.ListTargetsResponse() + ) + + request = cloud_deploy.ListTargetsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_deploy.ListTargetsResponse() + + client.list_targets( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_targets_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.ListTargetsRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_targets(request) + + +def test_list_targets_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.ListTargetsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_deploy.ListTargetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_targets(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/targets" % client.transport._host, + args[1], + ) + + +def test_list_targets_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_targets( + cloud_deploy.ListTargetsRequest(), + parent="parent_value", + ) + + +def test_list_targets_rest_pager(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_deploy.ListTargetsResponse( + targets=[ + cloud_deploy.Target(), + cloud_deploy.Target(), + cloud_deploy.Target(), + ], + next_page_token="abc", + ), + cloud_deploy.ListTargetsResponse( + targets=[], + next_page_token="def", + ), + cloud_deploy.ListTargetsResponse( + targets=[ + cloud_deploy.Target(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListTargetsResponse( + targets=[ + cloud_deploy.Target(), + cloud_deploy.Target(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(cloud_deploy.ListTargetsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_targets(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.Target) for i in results) + + pages = list(client.list_targets(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + 
cloud_deploy.GetTargetRequest, + dict, + ], +) +def test_get_target_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.Target( + name="name_value", + target_id="target_id_value", + uid="uid_value", + description="description_value", + require_approval=True, + etag="etag_value", + gke=cloud_deploy.GkeCluster(cluster="cluster_value"), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_deploy.Target.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_target(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_deploy.Target) + assert response.name == "name_value" + assert response.target_id == "target_id_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.require_approval is True + assert response.etag == "etag_value" + + +def test_get_target_rest_required_fields(request_type=cloud_deploy.GetTargetRequest): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_target._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_target._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.Target() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloud_deploy.Target.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_target(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_target_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_target._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_target_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_get_target" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_get_target" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.GetTargetRequest.pb(cloud_deploy.GetTargetRequest()) + transcode.return_value = 
{ + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_deploy.Target.to_json(cloud_deploy.Target()) + + request = cloud_deploy.GetTargetRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_deploy.Target() + + client.get_target( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_target_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.GetTargetRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_target(request) + + +def test_get_target_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloud_deploy.Target() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/targets/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_deploy.Target.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_target(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/targets/*}" % client.transport._host, + args[1], + ) + + +def test_get_target_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_target( + cloud_deploy.GetTargetRequest(), + name="name_value", + ) + + +def test_get_target_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.CreateTargetRequest, + dict, + ], +) +def test_create_target_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["target"] = { + "name": "name_value", + "target_id": "target_id_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "require_approval": True, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "gke": {"cluster": "cluster_value", "internal_ip": True}, + "anthos_cluster": {"membership": "membership_value"}, + "run": {"location": "location_value"}, + "etag": "etag_value", + "execution_configs": [ + { + "usages": [1], + "default_pool": { + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "private_pool": { + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + "execution_timeout": {"seconds": 751, "nanos": 543}, + } + ], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_target(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_target_rest_required_fields( + request_type=cloud_deploy.CreateTargetRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["target_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "targetId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_target._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "targetId" in jsonified_request + assert jsonified_request["targetId"] == request_init["target_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["targetId"] = "target_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_target._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "request_id", + "target_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "targetId" in jsonified_request + assert jsonified_request["targetId"] == "target_id_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_target(request) + + expected_params = [ + ( + "targetId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_target_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_target._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "targetId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "targetId", + "target", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_target_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_create_target" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_create_target" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.CreateTargetRequest.pb( + 
cloud_deploy.CreateTargetRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloud_deploy.CreateTargetRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_target( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_target_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.CreateTargetRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["target"] = { + "name": "name_value", + "target_id": "target_id_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "require_approval": True, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "gke": {"cluster": "cluster_value", "internal_ip": True}, + "anthos_cluster": {"membership": "membership_value"}, + "run": {"location": "location_value"}, + "etag": "etag_value", + "execution_configs": [ + { + "usages": [1], + "default_pool": { + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "private_pool": { + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", 
+ "execution_timeout": {"seconds": 751, "nanos": 543}, + } + ], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_target(request) + + +def test_create_target_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + target=cloud_deploy.Target(name="name_value"), + target_id="target_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_target(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/targets" % client.transport._host, + args[1], + ) + + +def test_create_target_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_target( + cloud_deploy.CreateTargetRequest(), + parent="parent_value", + target=cloud_deploy.Target(name="name_value"), + target_id="target_id_value", + ) + + +def test_create_target_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.UpdateTargetRequest, + dict, + ], +) +def test_update_target_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "target": {"name": "projects/sample1/locations/sample2/targets/sample3"} + } + request_init["target"] = { + "name": "projects/sample1/locations/sample2/targets/sample3", + "target_id": "target_id_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "require_approval": True, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "gke": {"cluster": "cluster_value", "internal_ip": True}, + "anthos_cluster": {"membership": "membership_value"}, + "run": {"location": "location_value"}, + "etag": "etag_value", + "execution_configs": [ + { + "usages": [1], + "default_pool": { + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "private_pool": { + "worker_pool": "worker_pool_value", + "service_account": 
"service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + "execution_timeout": {"seconds": 751, "nanos": 543}, + } + ], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_target(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_target_rest_required_fields( + request_type=cloud_deploy.UpdateTargetRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_target._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_target._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "allow_missing", + "request_id", + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_target(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_target_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_target._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "requestId", + "updateMask", + "validateOnly", + ) + 
) + & set( + ( + "updateMask", + "target", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_target_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_update_target" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_update_target" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.UpdateTargetRequest.pb( + cloud_deploy.UpdateTargetRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloud_deploy.UpdateTargetRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_target( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_target_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.UpdateTargetRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "target": 
{"name": "projects/sample1/locations/sample2/targets/sample3"} + } + request_init["target"] = { + "name": "projects/sample1/locations/sample2/targets/sample3", + "target_id": "target_id_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "require_approval": True, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "gke": {"cluster": "cluster_value", "internal_ip": True}, + "anthos_cluster": {"membership": "membership_value"}, + "run": {"location": "location_value"}, + "etag": "etag_value", + "execution_configs": [ + { + "usages": [1], + "default_pool": { + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "private_pool": { + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + "execution_timeout": {"seconds": 751, "nanos": 543}, + } + ], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_target(request) + + +def test_update_target_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "target": {"name": "projects/sample1/locations/sample2/targets/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + target=cloud_deploy.Target(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_target(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{target.name=projects/*/locations/*/targets/*}" + % client.transport._host, + args[1], + ) + + +def test_update_target_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_target( + cloud_deploy.UpdateTargetRequest(), + target=cloud_deploy.Target(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_target_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.DeleteTargetRequest, + dict, + ], +) +def test_delete_target_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_target(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_target_rest_required_fields( + request_type=cloud_deploy.DeleteTargetRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_target._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_target._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "etag", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_target(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_target_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_target._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "etag", + "requestId", + "validateOnly", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_target_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_delete_target" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_delete_target" + ) as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = cloud_deploy.DeleteTargetRequest.pb( + cloud_deploy.DeleteTargetRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloud_deploy.DeleteTargetRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_target( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_target_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.DeleteTargetRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/targets/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_target(request) + + +def test_delete_target_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/targets/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_target(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/targets/*}" % client.transport._host, + args[1], + ) + + +def test_delete_target_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_target( + cloud_deploy.DeleteTargetRequest(), + name="name_value", + ) + + +def test_delete_target_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.ListReleasesRequest, + dict, + ], +) +def test_list_releases_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.ListReleasesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_deploy.ListReleasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_releases(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListReleasesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_releases_rest_required_fields( + request_type=cloud_deploy.ListReleasesRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_releases._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_releases._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.ListReleasesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloud_deploy.ListReleasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_releases(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_releases_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_releases._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_releases_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, 
mock.patch.object( + transports.CloudDeployRestInterceptor, "post_list_releases" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_list_releases" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.ListReleasesRequest.pb( + cloud_deploy.ListReleasesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_deploy.ListReleasesResponse.to_json( + cloud_deploy.ListReleasesResponse() + ) + + request = cloud_deploy.ListReleasesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_deploy.ListReleasesResponse() + + client.list_releases( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_releases_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.ListReleasesRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_releases(request) + + +def test_list_releases_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.ListReleasesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_deploy.ListReleasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_releases(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/deliveryPipelines/*}/releases" + % client.transport._host, + args[1], + ) + + +def test_list_releases_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_releases( + cloud_deploy.ListReleasesRequest(), + parent="parent_value", + ) + + +def test_list_releases_rest_pager(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_deploy.ListReleasesResponse( + releases=[ + cloud_deploy.Release(), + cloud_deploy.Release(), + cloud_deploy.Release(), + ], + next_page_token="abc", + ), + cloud_deploy.ListReleasesResponse( + releases=[], + next_page_token="def", + ), + cloud_deploy.ListReleasesResponse( + releases=[ + cloud_deploy.Release(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListReleasesResponse( + releases=[ + cloud_deploy.Release(), + cloud_deploy.Release(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(cloud_deploy.ListReleasesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + 
req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + + pager = client.list_releases(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.Release) for i in results) + + pages = list(client.list_releases(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.GetReleaseRequest, + dict, + ], +) +def test_get_release_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.Release( + name="name_value", + uid="uid_value", + description="description_value", + abandoned=True, + skaffold_config_uri="skaffold_config_uri_value", + skaffold_config_path="skaffold_config_path_value", + render_state=cloud_deploy.Release.RenderState.SUCCEEDED, + etag="etag_value", + skaffold_version="skaffold_version_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_deploy.Release.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_release(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_deploy.Release) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.abandoned is True + assert response.skaffold_config_uri == "skaffold_config_uri_value" + assert response.skaffold_config_path == "skaffold_config_path_value" + assert response.render_state == cloud_deploy.Release.RenderState.SUCCEEDED + assert response.etag == "etag_value" + assert response.skaffold_version == "skaffold_version_value" + + +def test_get_release_rest_required_fields(request_type=cloud_deploy.GetReleaseRequest): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_release._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_release._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.Release() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloud_deploy.Release.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_release(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_release_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_release._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_release_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_get_release" + ) as post, 
mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_get_release" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.GetReleaseRequest.pb(cloud_deploy.GetReleaseRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_deploy.Release.to_json(cloud_deploy.Release()) + + request = cloud_deploy.GetReleaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_deploy.Release() + + client.get_release( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_release_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.GetReleaseRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_release(request) + + +def test_get_release_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.Release() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_deploy.Release.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_release(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*/releases/*}" + % client.transport._host, + args[1], + ) + + +def test_get_release_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_release( + cloud_deploy.GetReleaseRequest(), + name="name_value", + ) + + +def test_get_release_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.CreateReleaseRequest, + dict, + ], +) +def test_create_release_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + request_init["release"] = { + "name": "name_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "abandoned": True, + "create_time": {"seconds": 751, "nanos": 543}, + "render_start_time": {}, + "render_end_time": {}, + "skaffold_config_uri": "skaffold_config_uri_value", + "skaffold_config_path": "skaffold_config_path_value", + "build_artifacts": [{"image": "image_value", "tag": "tag_value"}], + "delivery_pipeline_snapshot": { + "name": "name_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {}, + "update_time": {}, + "serial_pipeline": { + "stages": [ + { + "target_id": "target_id_value", + "profiles": ["profiles_value1", "profiles_value2"], + "strategy": {"standard": {"verify": True}}, + } + ] + }, + "condition": { + "pipeline_ready_condition": {"status": True, "update_time": {}}, + "targets_present_condition": { + "status": True, + "missing_targets": [ + "missing_targets_value1", + "missing_targets_value2", + ], + "update_time": {}, + }, + }, + "etag": "etag_value", + "suspended": True, + }, + "target_snapshots": [ + { + "name": "name_value", + "target_id": "target_id_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + 
"require_approval": True, + "create_time": {}, + "update_time": {}, + "gke": {"cluster": "cluster_value", "internal_ip": True}, + "anthos_cluster": {"membership": "membership_value"}, + "run": {"location": "location_value"}, + "etag": "etag_value", + "execution_configs": [ + { + "usages": [1], + "default_pool": { + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "private_pool": { + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + "execution_timeout": {"seconds": 751, "nanos": 543}, + } + ], + } + ], + "render_state": 1, + "etag": "etag_value", + "skaffold_version": "skaffold_version_value", + "target_artifacts": {}, + "target_renders": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_release(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_release_rest_required_fields( + request_type=cloud_deploy.CreateReleaseRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["release_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "releaseId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_release._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "releaseId" in jsonified_request + assert jsonified_request["releaseId"] == request_init["release_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["releaseId"] = "release_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_release._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "release_id", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "releaseId" in jsonified_request + assert jsonified_request["releaseId"] == "release_id_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_release(request) + + expected_params = [ + ( + "releaseId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_release_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_release._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "releaseId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "releaseId", + "release", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_release_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = 
CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_create_release" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_create_release" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.CreateReleaseRequest.pb( + cloud_deploy.CreateReleaseRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloud_deploy.CreateReleaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_release( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_release_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.CreateReleaseRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + request_init["release"] = { + "name": "name_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "abandoned": True, + "create_time": {"seconds": 751, "nanos": 543}, + "render_start_time": {}, + "render_end_time": {}, + "skaffold_config_uri": 
"skaffold_config_uri_value", + "skaffold_config_path": "skaffold_config_path_value", + "build_artifacts": [{"image": "image_value", "tag": "tag_value"}], + "delivery_pipeline_snapshot": { + "name": "name_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {}, + "update_time": {}, + "serial_pipeline": { + "stages": [ + { + "target_id": "target_id_value", + "profiles": ["profiles_value1", "profiles_value2"], + "strategy": {"standard": {"verify": True}}, + } + ] + }, + "condition": { + "pipeline_ready_condition": {"status": True, "update_time": {}}, + "targets_present_condition": { + "status": True, + "missing_targets": [ + "missing_targets_value1", + "missing_targets_value2", + ], + "update_time": {}, + }, + }, + "etag": "etag_value", + "suspended": True, + }, + "target_snapshots": [ + { + "name": "name_value", + "target_id": "target_id_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "require_approval": True, + "create_time": {}, + "update_time": {}, + "gke": {"cluster": "cluster_value", "internal_ip": True}, + "anthos_cluster": {"membership": "membership_value"}, + "run": {"location": "location_value"}, + "etag": "etag_value", + "execution_configs": [ + { + "usages": [1], + "default_pool": { + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "private_pool": { + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + }, + "worker_pool": "worker_pool_value", + "service_account": "service_account_value", + "artifact_storage": "artifact_storage_value", + "execution_timeout": {"seconds": 751, "nanos": 543}, + } + ], + } + ], + "render_state": 1, + "etag": "etag_value", + "skaffold_version": "skaffold_version_value", + "target_artifacts": {}, + "target_renders": {}, + } + request = request_type(**request_init) + + # Mock 
the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_release(request) + + +def test_create_release_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + release=cloud_deploy.Release(name="name_value"), + release_id="release_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_release(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/deliveryPipelines/*}/releases" + % client.transport._host, + args[1], + ) + + +def test_create_release_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_release( + cloud_deploy.CreateReleaseRequest(), + parent="parent_value", + release=cloud_deploy.Release(name="name_value"), + release_id="release_id_value", + ) + + +def test_create_release_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.AbandonReleaseRequest, + dict, + ], +) +def test_abandon_release_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloud_deploy.AbandonReleaseResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_deploy.AbandonReleaseResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.abandon_release(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_deploy.AbandonReleaseResponse) + + +def test_abandon_release_rest_required_fields( + request_type=cloud_deploy.AbandonReleaseRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).abandon_release._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).abandon_release._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = cloud_deploy.AbandonReleaseResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloud_deploy.AbandonReleaseResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.abandon_release(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_abandon_release_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.abandon_release._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_abandon_release_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + 
type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_abandon_release" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_abandon_release" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.AbandonReleaseRequest.pb( + cloud_deploy.AbandonReleaseRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_deploy.AbandonReleaseResponse.to_json( + cloud_deploy.AbandonReleaseResponse() + ) + + request = cloud_deploy.AbandonReleaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_deploy.AbandonReleaseResponse() + + client.abandon_release( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_abandon_release_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.AbandonReleaseRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.abandon_release(request) + + +def test_abandon_release_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.AbandonReleaseResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_deploy.AbandonReleaseResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.abandon_release(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*/releases/*}:abandon" + % client.transport._host, + args[1], + ) + + +def test_abandon_release_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.abandon_release( + cloud_deploy.AbandonReleaseRequest(), + name="name_value", + ) + + +def test_abandon_release_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.ApproveRolloutRequest, + dict, + ], +) +def test_approve_rollout_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4/rollouts/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.ApproveRolloutResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_deploy.ApproveRolloutResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.approve_rollout(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_deploy.ApproveRolloutResponse) + + +def test_approve_rollout_rest_required_fields( + request_type=cloud_deploy.ApproveRolloutRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["name"] = "" + request_init["approved"] = False + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).approve_rollout._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["approved"] = True + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).approve_rollout._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "approved" in jsonified_request + assert jsonified_request["approved"] == True + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.ApproveRolloutResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloud_deploy.ApproveRolloutResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.approve_rollout(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_approve_rollout_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.approve_rollout._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "approved", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_approve_rollout_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_approve_rollout" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_approve_rollout" + ) as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = cloud_deploy.ApproveRolloutRequest.pb( + cloud_deploy.ApproveRolloutRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_deploy.ApproveRolloutResponse.to_json( + cloud_deploy.ApproveRolloutResponse() + ) + + request = cloud_deploy.ApproveRolloutRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_deploy.ApproveRolloutResponse() + + client.approve_rollout( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_approve_rollout_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.ApproveRolloutRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4/rollouts/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.approve_rollout(request) + + +def test_approve_rollout_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.ApproveRolloutResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4/rollouts/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_deploy.ApproveRolloutResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.approve_rollout(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*/releases/*/rollouts/*}:approve" + % client.transport._host, + args[1], + ) + + +def test_approve_rollout_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.approve_rollout( + cloud_deploy.ApproveRolloutRequest(), + name="name_value", + ) + + +def test_approve_rollout_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.ListRolloutsRequest, + dict, + ], +) +def test_list_rollouts_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.ListRolloutsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_deploy.ListRolloutsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_rollouts(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListRolloutsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_rollouts_rest_required_fields( + request_type=cloud_deploy.ListRolloutsRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_rollouts._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_rollouts._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.ListRolloutsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloud_deploy.ListRolloutsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_rollouts(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_rollouts_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_rollouts._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rollouts_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, 
mock.patch.object( + transports.CloudDeployRestInterceptor, "post_list_rollouts" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_list_rollouts" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.ListRolloutsRequest.pb( + cloud_deploy.ListRolloutsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_deploy.ListRolloutsResponse.to_json( + cloud_deploy.ListRolloutsResponse() + ) + + request = cloud_deploy.ListRolloutsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_deploy.ListRolloutsResponse() + + client.list_rollouts( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rollouts_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.ListRolloutsRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_rollouts(request) + + +def test_list_rollouts_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.ListRolloutsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_deploy.ListRolloutsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_rollouts(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/deliveryPipelines/*/releases/*}/rollouts" + % client.transport._host, + args[1], + ) + + +def test_list_rollouts_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_rollouts( + cloud_deploy.ListRolloutsRequest(), + parent="parent_value", + ) + + +def test_list_rollouts_rest_pager(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), + ], + next_page_token="abc", + ), + cloud_deploy.ListRolloutsResponse( + rollouts=[], + next_page_token="def", + ), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListRolloutsResponse( + rollouts=[ + cloud_deploy.Rollout(), + cloud_deploy.Rollout(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(cloud_deploy.ListRolloutsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + } + + pager = client.list_rollouts(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.Rollout) for i in results) + + pages = list(client.list_rollouts(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.GetRolloutRequest, + dict, + ], +) +def test_get_rollout_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4/rollouts/sample5" + } + request = request_type(**request_init) + + # Mock the http 
request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.Rollout( + name="name_value", + uid="uid_value", + description="description_value", + target_id="target_id_value", + approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL, + state=cloud_deploy.Rollout.State.SUCCEEDED, + failure_reason="failure_reason_value", + deploying_build="deploying_build_value", + etag="etag_value", + deploy_failure_cause=cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_deploy.Rollout.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_rollout(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_deploy.Rollout) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.target_id == "target_id_value" + assert response.approval_state == cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL + assert response.state == cloud_deploy.Rollout.State.SUCCEEDED + assert response.failure_reason == "failure_reason_value" + assert response.deploying_build == "deploying_build_value" + assert response.etag == "etag_value" + assert ( + response.deploy_failure_cause + == cloud_deploy.Rollout.FailureCause.CLOUD_BUILD_UNAVAILABLE + ) + + +def test_get_rollout_rest_required_fields(request_type=cloud_deploy.GetRolloutRequest): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_rollout._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_rollout._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = cloud_deploy.Rollout() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloud_deploy.Rollout.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_rollout(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_rollout_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_rollout._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rollout_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as 
transcode, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_get_rollout" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_get_rollout" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.GetRolloutRequest.pb(cloud_deploy.GetRolloutRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_deploy.Rollout.to_json(cloud_deploy.Rollout()) + + request = cloud_deploy.GetRolloutRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_deploy.Rollout() + + client.get_rollout( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rollout_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.GetRolloutRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4/rollouts/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_rollout(request) + + +def test_get_rollout_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.Rollout() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4/rollouts/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_deploy.Rollout.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_rollout(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*/releases/*/rollouts/*}" + % client.transport._host, + args[1], + ) + + +def test_get_rollout_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_rollout( + cloud_deploy.GetRolloutRequest(), + name="name_value", + ) + + +def test_get_rollout_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.CreateRolloutRequest, + dict, + ], +) +def test_create_rollout_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + } + request_init["rollout"] = { + "name": "name_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "approve_time": {}, + "enqueue_time": {}, + "deploy_start_time": {}, + "deploy_end_time": {}, + "target_id": "target_id_value", + "approval_state": 1, + "state": 1, + "failure_reason": "failure_reason_value", + "deploying_build": "deploying_build_value", + "etag": "etag_value", + "deploy_failure_cause": 1, + "phases": [ + { + "id": "id_value", + "state": 1, + "deployment_jobs": { + "deploy_job": { + "id": "id_value", + "state": 1, + "job_run": "job_run_value", + "deploy_job": {}, + "verify_job": {}, + }, + "verify_job": {}, + }, + } + ], + "metadata": { + "cloud_run": { + "service": 
"service_value", + "service_urls": ["service_urls_value1", "service_urls_value2"], + "revision": "revision_value", + } + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_rollout(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_rollout_rest_required_fields( + request_type=cloud_deploy.CreateRolloutRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["rollout_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "rolloutId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_rollout._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "rolloutId" in jsonified_request + assert jsonified_request["rolloutId"] == request_init["rollout_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["rolloutId"] = "rollout_id_value" + + unset_fields = transport_class( + 
credentials=ga_credentials.AnonymousCredentials() + ).create_rollout._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "rollout_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "rolloutId" in jsonified_request + assert jsonified_request["rolloutId"] == "rollout_id_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_rollout(request) + + expected_params = [ + ( + "rolloutId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_rollout_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_rollout._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "rolloutId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "rolloutId", + "rollout", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_rollout_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudDeployRestInterceptor, "post_create_rollout" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_create_rollout" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.CreateRolloutRequest.pb( + 
cloud_deploy.CreateRolloutRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloud_deploy.CreateRolloutRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_rollout( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_rollout_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.CreateRolloutRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + } + request_init["rollout"] = { + "name": "name_value", + "uid": "uid_value", + "description": "description_value", + "annotations": {}, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "approve_time": {}, + "enqueue_time": {}, + "deploy_start_time": {}, + "deploy_end_time": {}, + "target_id": "target_id_value", + "approval_state": 1, + "state": 1, + "failure_reason": "failure_reason_value", + "deploying_build": "deploying_build_value", + "etag": "etag_value", + "deploy_failure_cause": 1, + "phases": [ + { + "id": "id_value", + "state": 1, + "deployment_jobs": { + "deploy_job": { + "id": "id_value", + "state": 1, + "job_run": "job_run_value", + "deploy_job": {}, + "verify_job": {}, + }, + "verify_job": {}, + }, + } + ], + "metadata": { + "cloud_run": { + "service": "service_value", + "service_urls": ["service_urls_value1", 
"service_urls_value2"], + "revision": "revision_value", + } + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_rollout(request) + + +def test_create_rollout_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + rollout=cloud_deploy.Rollout(name="name_value"), + rollout_id="rollout_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_rollout(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/deliveryPipelines/*/releases/*}/rollouts" + % client.transport._host, + args[1], + ) + + +def test_create_rollout_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_rollout( + cloud_deploy.CreateRolloutRequest(), + parent="parent_value", + rollout=cloud_deploy.Rollout(name="name_value"), + rollout_id="rollout_id_value", + ) + + +def test_create_rollout_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.RetryJobRequest, + dict, + ], +) +def test_retry_job_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "rollout": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4/rollouts/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloud_deploy.RetryJobResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_deploy.RetryJobResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.retry_job(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_deploy.RetryJobResponse) + + +def test_retry_job_rest_required_fields(request_type=cloud_deploy.RetryJobRequest): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["rollout"] = "" + request_init["phase_id"] = "" + request_init["job_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).retry_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["rollout"] = "rollout_value" + jsonified_request["phaseId"] = "phase_id_value" + jsonified_request["jobId"] = "job_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).retry_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "rollout" in jsonified_request + assert jsonified_request["rollout"] == "rollout_value" + assert "phaseId" in jsonified_request + assert jsonified_request["phaseId"] == "phase_id_value" + assert "jobId" in jsonified_request + 
assert jsonified_request["jobId"] == "job_id_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.RetryJobResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloud_deploy.RetryJobResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.retry_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_retry_job_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.retry_job._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "rollout", + "phaseId", + "jobId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_retry_job_rest_interceptors(null_interceptor): + transport = 
transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_retry_job" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_retry_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.RetryJobRequest.pb(cloud_deploy.RetryJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_deploy.RetryJobResponse.to_json( + cloud_deploy.RetryJobResponse() + ) + + request = cloud_deploy.RetryJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_deploy.RetryJobResponse() + + client.retry_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_retry_job_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.RetryJobRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "rollout": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4/rollouts/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.retry_job(request) + + +def test_retry_job_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.RetryJobResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "rollout": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4/rollouts/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_deploy.RetryJobResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.retry_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{rollout=projects/*/locations/*/deliveryPipelines/*/releases/*/rollouts/*}:retryJob" + % client.transport._host, + args[1], + ) + + +def test_retry_job_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.retry_job( + cloud_deploy.RetryJobRequest(), + rollout="rollout_value", + phase_id="phase_id_value", + job_id="job_id_value", + ) + + +def test_retry_job_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.ListJobRunsRequest, + dict, + ], +) +def test_list_job_runs_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4/rollouts/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloud_deploy.ListJobRunsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_deploy.ListJobRunsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_job_runs(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobRunsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_job_runs_rest_required_fields( + request_type=cloud_deploy.ListJobRunsRequest, +): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_job_runs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_job_runs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.ListJobRunsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloud_deploy.ListJobRunsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_job_runs(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_job_runs_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_job_runs._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_job_runs_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_list_job_runs" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_list_job_runs" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.ListJobRunsRequest.pb( + cloud_deploy.ListJobRunsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_deploy.ListJobRunsResponse.to_json( + cloud_deploy.ListJobRunsResponse() + ) + + request = cloud_deploy.ListJobRunsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_deploy.ListJobRunsResponse() + + client.list_job_runs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_job_runs_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.ListJobRunsRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4/rollouts/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_job_runs(request) + + +def test_list_job_runs_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloud_deploy.ListJobRunsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4/rollouts/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_deploy.ListJobRunsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_job_runs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/deliveryPipelines/*/releases/*/rollouts/*}/jobRuns" + % client.transport._host, + args[1], + ) + + +def test_list_job_runs_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_job_runs( + cloud_deploy.ListJobRunsRequest(), + parent="parent_value", + ) + + +def test_list_job_runs_rest_pager(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + next_page_token="abc", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[], + next_page_token="def", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + ], + next_page_token="ghi", + ), + cloud_deploy.ListJobRunsResponse( + job_runs=[ + cloud_deploy.JobRun(), + cloud_deploy.JobRun(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(cloud_deploy.ListJobRunsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4/rollouts/sample5" + } + + pager = client.list_job_runs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_deploy.JobRun) for i in results) + + pages = list(client.list_job_runs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.GetJobRunRequest, + dict, + ], +) +def test_get_job_run_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4/rollouts/sample5/jobRuns/sample6" + } + request = request_type(**request_init) 
+ + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.JobRun( + name="name_value", + uid="uid_value", + phase_id="phase_id_value", + job_id="job_id_value", + state=cloud_deploy.JobRun.State.IN_PROGRESS, + etag="etag_value", + deploy_job_run=cloud_deploy.DeployJobRun(build="build_value"), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_deploy.JobRun.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_job_run(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_deploy.JobRun) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.phase_id == "phase_id_value" + assert response.job_id == "job_id_value" + assert response.state == cloud_deploy.JobRun.State.IN_PROGRESS + assert response.etag == "etag_value" + + +def test_get_job_run_rest_required_fields(request_type=cloud_deploy.GetJobRunRequest): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_job_run._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_job_run._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.JobRun() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloud_deploy.JobRun.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_job_run(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_job_run_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_job_run._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_job_run_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_get_job_run" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_get_job_run" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.GetJobRunRequest.pb(cloud_deploy.GetJobRunRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = cloud_deploy.JobRun.to_json(cloud_deploy.JobRun()) + + request = cloud_deploy.GetJobRunRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_deploy.JobRun() + + client.get_job_run( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_job_run_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.GetJobRunRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4/rollouts/sample5/jobRuns/sample6" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_job_run(request) + + +def test_get_job_run_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloud_deploy.JobRun() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deliveryPipelines/sample3/releases/sample4/rollouts/sample5/jobRuns/sample6" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_deploy.JobRun.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_job_run(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deliveryPipelines/*/releases/*/rollouts/*/jobRuns/*}" + % client.transport._host, + args[1], + ) + + +def test_get_job_run_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_job_run( + cloud_deploy.GetJobRunRequest(), + name="name_value", + ) + + +def test_get_job_run_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_deploy.GetConfigRequest, + dict, + ], +) +def test_get_config_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/config"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.Config( + name="name_value", + default_skaffold_version="default_skaffold_version_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_deploy.Config.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_config(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_deploy.Config) + assert response.name == "name_value" + assert response.default_skaffold_version == "default_skaffold_version_value" + + +def test_get_config_rest_required_fields(request_type=cloud_deploy.GetConfigRequest): + transport_class = transports.CloudDeployRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_deploy.Config() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloud_deploy.Config.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_config_rest_unset_required_fields(): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_config_rest_interceptors(null_interceptor): + transport = transports.CloudDeployRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudDeployRestInterceptor(), + ) + client = CloudDeployClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_get_config" + ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "pre_get_config" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_deploy.GetConfigRequest.pb(cloud_deploy.GetConfigRequest()) + transcode.return_value = 
{ + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_deploy.Config.to_json(cloud_deploy.Config()) + + request = cloud_deploy.GetConfigRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_deploy.Config() + + client.get_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_config_rest_bad_request( + transport: str = "rest", request_type=cloud_deploy.GetConfigRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/config"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_config(request) + + +def test_get_config_rest_flattened(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloud_deploy.Config() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/config"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_deploy.Config.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/config}" % client.transport._host, + args[1], + ) + + +def test_get_config_rest_flattened_error(transport: str = "rest"): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_config( + cloud_deploy.GetConfigRequest(), + name="name_value", + ) + + +def test_get_config_rest_error(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CloudDeployGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.CloudDeployGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudDeployClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.CloudDeployGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudDeployClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudDeployClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CloudDeployGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudDeployClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudDeployGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CloudDeployClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.CloudDeployGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CloudDeployGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudDeployGrpcTransport, + transports.CloudDeployGrpcAsyncIOTransport, + transports.CloudDeployRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = CloudDeployClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CloudDeployGrpcTransport, + ) + + +def test_cloud_deploy_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.CloudDeployTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_cloud_deploy_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.cloud.deploy_v1.services.cloud_deploy.transports.CloudDeployTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.CloudDeployTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly # raise NotImplementedError. methods = ( "list_delivery_pipelines", @@ -7262,6 +14436,7 @@ def test_cloud_deploy_transport_auth_adc(transport_class): [ transports.CloudDeployGrpcTransport, transports.CloudDeployGrpcAsyncIOTransport, + transports.CloudDeployRestTransport, ], ) def test_cloud_deploy_transport_auth_gdch_credentials(transport_class): @@ -7356,11 +14531,40 @@ def test_cloud_deploy_grpc_transport_client_cert_source_for_mtls(transport_class ) +def test_cloud_deploy_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.CloudDeployRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_cloud_deploy_rest_lro_client(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_cloud_deploy_host_no_port(transport_name): @@ -7371,7 +14575,11 @@ def test_cloud_deploy_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("clouddeploy.googleapis.com:443") + assert client.transport._host == ( + "clouddeploy.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://clouddeploy.googleapis.com" + ) @pytest.mark.parametrize( @@ -7379,6 +14587,7 @@ def test_cloud_deploy_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_cloud_deploy_host_with_port(transport_name): @@ -7389,7 +14598,96 @@ def test_cloud_deploy_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("clouddeploy.googleapis.com:8000") + assert client.transport._host == ( + "clouddeploy.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://clouddeploy.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_cloud_deploy_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = CloudDeployClient( + credentials=creds1, + transport=transport_name, + ) + client2 = CloudDeployClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_delivery_pipelines._session + session2 = client2.transport.list_delivery_pipelines._session + assert session1 != session2 + session1 = client1.transport.get_delivery_pipeline._session + session2 = client2.transport.get_delivery_pipeline._session + assert session1 != session2 + session1 = client1.transport.create_delivery_pipeline._session + session2 = client2.transport.create_delivery_pipeline._session + assert session1 != session2 + 
session1 = client1.transport.update_delivery_pipeline._session + session2 = client2.transport.update_delivery_pipeline._session + assert session1 != session2 + session1 = client1.transport.delete_delivery_pipeline._session + session2 = client2.transport.delete_delivery_pipeline._session + assert session1 != session2 + session1 = client1.transport.list_targets._session + session2 = client2.transport.list_targets._session + assert session1 != session2 + session1 = client1.transport.get_target._session + session2 = client2.transport.get_target._session + assert session1 != session2 + session1 = client1.transport.create_target._session + session2 = client2.transport.create_target._session + assert session1 != session2 + session1 = client1.transport.update_target._session + session2 = client2.transport.update_target._session + assert session1 != session2 + session1 = client1.transport.delete_target._session + session2 = client2.transport.delete_target._session + assert session1 != session2 + session1 = client1.transport.list_releases._session + session2 = client2.transport.list_releases._session + assert session1 != session2 + session1 = client1.transport.get_release._session + session2 = client2.transport.get_release._session + assert session1 != session2 + session1 = client1.transport.create_release._session + session2 = client2.transport.create_release._session + assert session1 != session2 + session1 = client1.transport.abandon_release._session + session2 = client2.transport.abandon_release._session + assert session1 != session2 + session1 = client1.transport.approve_rollout._session + session2 = client2.transport.approve_rollout._session + assert session1 != session2 + session1 = client1.transport.list_rollouts._session + session2 = client2.transport.list_rollouts._session + assert session1 != session2 + session1 = client1.transport.get_rollout._session + session2 = client2.transport.get_rollout._session + assert session1 != session2 + session1 = 
client1.transport.create_rollout._session + session2 = client2.transport.create_rollout._session + assert session1 != session2 + session1 = client1.transport.retry_job._session + session2 = client2.transport.retry_job._session + assert session1 != session2 + session1 = client1.transport.list_job_runs._session + session2 = client2.transport.list_job_runs._session + assert session1 != session2 + session1 = client1.transport.get_job_run._session + session2 = client2.transport.get_job_run._session + assert session1 != session2 + session1 = client1.transport.get_config._session + session2 = client2.transport.get_config._session + assert session1 != session2 def test_cloud_deploy_grpc_transport_channel(): @@ -7495,480 +14793,1009 @@ def test_cloud_deploy_transport_channel_mtls_with_adc(transport_class): client_cert_source=None, ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_cloud_deploy_grpc_lro_client(): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +def test_cloud_deploy_grpc_lro_async_client(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_build_path(): + project = "squid" + location = "clam" + build = "whelk" + expected = "projects/{project}/locations/{location}/builds/{build}".format( + project=project, + location=location, + build=build, + ) + actual = CloudDeployClient.build_path(project, location, build) + assert expected == actual + + +def test_parse_build_path(): + expected = { + "project": "octopus", + "location": "oyster", + "build": "nudibranch", + } + path = CloudDeployClient.build_path(**expected) + + # Check that the path construction is reversible. + actual = CloudDeployClient.parse_build_path(path) + assert expected == actual + + +def test_cluster_path(): + project = "cuttlefish" + location = "mussel" + cluster = "winkle" + expected = "projects/{project}/locations/{location}/clusters/{cluster}".format( + project=project, + location=location, + cluster=cluster, + ) + actual = CloudDeployClient.cluster_path(project, location, cluster) + assert expected == actual + + +def test_parse_cluster_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "cluster": "abalone", + } + path = CloudDeployClient.cluster_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudDeployClient.parse_cluster_path(path) + assert expected == actual + + +def test_config_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}/config".format( + project=project, + location=location, + ) + actual = CloudDeployClient.config_path(project, location) + assert expected == actual + + +def test_parse_config_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = CloudDeployClient.config_path(**expected) + + # Check that the path construction is reversible. + actual = CloudDeployClient.parse_config_path(path) + assert expected == actual + + +def test_delivery_pipeline_path(): + project = "oyster" + location = "nudibranch" + delivery_pipeline = "cuttlefish" + expected = "projects/{project}/locations/{location}/deliveryPipelines/{delivery_pipeline}".format( + project=project, + location=location, + delivery_pipeline=delivery_pipeline, + ) + actual = CloudDeployClient.delivery_pipeline_path( + project, location, delivery_pipeline + ) + assert expected == actual + + +def test_parse_delivery_pipeline_path(): + expected = { + "project": "mussel", + "location": "winkle", + "delivery_pipeline": "nautilus", + } + path = CloudDeployClient.delivery_pipeline_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudDeployClient.parse_delivery_pipeline_path(path) + assert expected == actual + + +def test_job_run_path(): + project = "scallop" + location = "abalone" + delivery_pipeline = "squid" + release = "clam" + rollout = "whelk" + job_run = "octopus" + expected = "projects/{project}/locations/{location}/deliveryPipelines/{delivery_pipeline}/releases/{release}/rollouts/{rollout}/jobRuns/{job_run}".format( + project=project, + location=location, + delivery_pipeline=delivery_pipeline, + release=release, + rollout=rollout, + job_run=job_run, + ) + actual = CloudDeployClient.job_run_path( + project, location, delivery_pipeline, release, rollout, job_run + ) + assert expected == actual + +def test_parse_job_run_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "delivery_pipeline": "cuttlefish", + "release": "mussel", + "rollout": "winkle", + "job_run": "nautilus", + } + path = CloudDeployClient.job_run_path(**expected) -def test_cloud_deploy_grpc_lro_client(): - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - transport = client.transport + # Check that the path construction is reversible. + actual = CloudDeployClient.parse_job_run_path(path) + assert expected == actual - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, + +def test_membership_path(): + project = "scallop" + location = "abalone" + membership = "squid" + expected = ( + "projects/{project}/locations/{location}/memberships/{membership}".format( + project=project, + location=location, + membership=membership, + ) ) + actual = CloudDeployClient.membership_path(project, location, membership) + assert expected == actual - # Ensure that subsequent calls to the property send the exact same object. 
- assert transport.operations_client is transport.operations_client +def test_parse_membership_path(): + expected = { + "project": "clam", + "location": "whelk", + "membership": "octopus", + } + path = CloudDeployClient.membership_path(**expected) -def test_cloud_deploy_grpc_lro_async_client(): - client = CloudDeployAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - transport = client.transport + # Check that the path construction is reversible. + actual = CloudDeployClient.parse_membership_path(path) + assert expected == actual - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, + +def test_release_path(): + project = "oyster" + location = "nudibranch" + delivery_pipeline = "cuttlefish" + release = "mussel" + expected = "projects/{project}/locations/{location}/deliveryPipelines/{delivery_pipeline}/releases/{release}".format( + project=project, + location=location, + delivery_pipeline=delivery_pipeline, + release=release, + ) + actual = CloudDeployClient.release_path( + project, location, delivery_pipeline, release ) + assert expected == actual - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client + +def test_parse_release_path(): + expected = { + "project": "winkle", + "location": "nautilus", + "delivery_pipeline": "scallop", + "release": "abalone", + } + path = CloudDeployClient.release_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudDeployClient.parse_release_path(path) + assert expected == actual -def test_build_path(): +def test_rollout_path(): project = "squid" location = "clam" - build = "whelk" - expected = "projects/{project}/locations/{location}/builds/{build}".format( + delivery_pipeline = "whelk" + release = "octopus" + rollout = "oyster" + expected = "projects/{project}/locations/{location}/deliveryPipelines/{delivery_pipeline}/releases/{release}/rollouts/{rollout}".format( project=project, location=location, - build=build, + delivery_pipeline=delivery_pipeline, + release=release, + rollout=rollout, + ) + actual = CloudDeployClient.rollout_path( + project, location, delivery_pipeline, release, rollout ) - actual = CloudDeployClient.build_path(project, location, build) assert expected == actual -def test_parse_build_path(): +def test_parse_rollout_path(): expected = { - "project": "octopus", - "location": "oyster", - "build": "nudibranch", + "project": "nudibranch", + "location": "cuttlefish", + "delivery_pipeline": "mussel", + "release": "winkle", + "rollout": "nautilus", } - path = CloudDeployClient.build_path(**expected) + path = CloudDeployClient.rollout_path(**expected) # Check that the path construction is reversible. 
- actual = CloudDeployClient.parse_build_path(path) + actual = CloudDeployClient.parse_rollout_path(path) assert expected == actual -def test_cluster_path(): - project = "cuttlefish" - location = "mussel" - cluster = "winkle" - expected = "projects/{project}/locations/{location}/clusters/{cluster}".format( +def test_target_path(): + project = "scallop" + location = "abalone" + target = "squid" + expected = "projects/{project}/locations/{location}/targets/{target}".format( project=project, location=location, - cluster=cluster, + target=target, ) - actual = CloudDeployClient.cluster_path(project, location, cluster) + actual = CloudDeployClient.target_path(project, location, target) assert expected == actual -def test_parse_cluster_path(): +def test_parse_target_path(): expected = { - "project": "nautilus", - "location": "scallop", - "cluster": "abalone", + "project": "clam", + "location": "whelk", + "target": "octopus", } - path = CloudDeployClient.cluster_path(**expected) + path = CloudDeployClient.target_path(**expected) # Check that the path construction is reversible. 
- actual = CloudDeployClient.parse_cluster_path(path) + actual = CloudDeployClient.parse_target_path(path) assert expected == actual -def test_config_path(): - project = "squid" - location = "clam" - expected = "projects/{project}/locations/{location}/config".format( - project=project, - location=location, +def test_worker_pool_path(): + project = "oyster" + location = "nudibranch" + worker_pool = "cuttlefish" + expected = ( + "projects/{project}/locations/{location}/workerPools/{worker_pool}".format( + project=project, + location=location, + worker_pool=worker_pool, + ) ) - actual = CloudDeployClient.config_path(project, location) + actual = CloudDeployClient.worker_pool_path(project, location, worker_pool) assert expected == actual -def test_parse_config_path(): +def test_parse_worker_pool_path(): expected = { - "project": "whelk", - "location": "octopus", + "project": "mussel", + "location": "winkle", + "worker_pool": "nautilus", } - path = CloudDeployClient.config_path(**expected) + path = CloudDeployClient.worker_pool_path(**expected) # Check that the path construction is reversible. 
- actual = CloudDeployClient.parse_config_path(path) + actual = CloudDeployClient.parse_worker_pool_path(path) assert expected == actual -def test_delivery_pipeline_path(): - project = "oyster" - location = "nudibranch" - delivery_pipeline = "cuttlefish" - expected = "projects/{project}/locations/{location}/deliveryPipelines/{delivery_pipeline}".format( - project=project, - location=location, - delivery_pipeline=delivery_pipeline, +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, ) - actual = CloudDeployClient.delivery_pipeline_path( - project, location, delivery_pipeline + actual = CloudDeployClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = CloudDeployClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = CloudDeployClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = CloudDeployClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = CloudDeployClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudDeployClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = CloudDeployClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = CloudDeployClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = CloudDeployClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format( + project=project, ) + actual = CloudDeployClient.common_project_path(project) assert expected == actual -def test_parse_delivery_pipeline_path(): +def test_parse_common_project_path(): expected = { - "project": "mussel", - "location": "winkle", - "delivery_pipeline": "nautilus", + "project": "nudibranch", } - path = CloudDeployClient.delivery_pipeline_path(**expected) + path = CloudDeployClient.common_project_path(**expected) # Check that the path construction is reversible. 
- actual = CloudDeployClient.parse_delivery_pipeline_path(path) + actual = CloudDeployClient.parse_common_project_path(path) assert expected == actual -def test_job_run_path(): - project = "scallop" - location = "abalone" - delivery_pipeline = "squid" - release = "clam" - rollout = "whelk" - job_run = "octopus" - expected = "projects/{project}/locations/{location}/deliveryPipelines/{delivery_pipeline}/releases/{release}/rollouts/{rollout}/jobRuns/{job_run}".format( +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format( project=project, location=location, - delivery_pipeline=delivery_pipeline, - release=release, - rollout=rollout, - job_run=job_run, - ) - actual = CloudDeployClient.job_run_path( - project, location, delivery_pipeline, release, rollout, job_run ) + actual = CloudDeployClient.common_location_path(project, location) assert expected == actual -def test_parse_job_run_path(): +def test_parse_common_location_path(): expected = { - "project": "oyster", - "location": "nudibranch", - "delivery_pipeline": "cuttlefish", - "release": "mussel", - "rollout": "winkle", - "job_run": "nautilus", + "project": "winkle", + "location": "nautilus", } - path = CloudDeployClient.job_run_path(**expected) + path = CloudDeployClient.common_location_path(**expected) # Check that the path construction is reversible. 
- actual = CloudDeployClient.parse_job_run_path(path) + actual = CloudDeployClient.parse_common_location_path(path) assert expected == actual -def test_membership_path(): - project = "scallop" - location = "abalone" - membership = "squid" - expected = ( - "projects/{project}/locations/{location}/memberships/{membership}".format( - project=project, - location=location, - membership=membership, +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.CloudDeployTransport, "_prep_wrapped_messages" + ) as prep: + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.CloudDeployTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = CloudDeployClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = CloudDeployAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) - actual = CloudDeployClient.membership_path(project, location, membership) - assert expected == actual + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() -def test_parse_membership_path(): - expected = { - "project": "clam", - "location": "whelk", - "membership": "octopus", - } - path = CloudDeployClient.membership_path(**expected) +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Check that the path construction is reversible. 
- actual = CloudDeployClient.parse_membership_path(path) - assert expected == actual + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) -def test_release_path(): - project = "oyster" - location = "nudibranch" - delivery_pipeline = "cuttlefish" - release = "mussel" - expected = "projects/{project}/locations/{location}/deliveryPipelines/{delivery_pipeline}/releases/{release}".format( - project=project, - location=location, - delivery_pipeline=delivery_pipeline, - release=release, +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - actual = CloudDeployClient.release_path( - project, location, delivery_pipeline, release + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - assert expected == actual + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) -def test_parse_release_path(): - expected = { - "project": "winkle", - "location": "nautilus", - "delivery_pipeline": "scallop", - "release": "abalone", + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.GetIamPolicyRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/locations/sample2/deliveryPipelines/sample3"}, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "resource": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } - path = CloudDeployClient.release_path(**expected) + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = policy_pb2.Policy() - # Check that the path construction is reversible. - actual = CloudDeployClient.parse_release_path(path) - assert expected == actual + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -def test_rollout_path(): - project = "squid" - location = "clam" - delivery_pipeline = "whelk" - release = "octopus" - rollout = "oyster" - expected = "projects/{project}/locations/{location}/deliveryPipelines/{delivery_pipeline}/releases/{release}/rollouts/{rollout}".format( - project=project, - location=location, - delivery_pipeline=delivery_pipeline, - release=release, - rollout=rollout, + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.SetIamPolicyRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - actual = CloudDeployClient.rollout_path( - project, location, delivery_pipeline, release, rollout + + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/locations/sample2/deliveryPipelines/sample3"}, + request, ) - assert expected == actual + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) -def test_parse_rollout_path(): - expected = { - "project": "nudibranch", - "location": "cuttlefish", - "delivery_pipeline": "mussel", - "release": "winkle", - "rollout": "nautilus", + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "resource": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } - path = CloudDeployClient.rollout_path(**expected) + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) - # Check that the path construction is reversible. 
- actual = CloudDeployClient.parse_rollout_path(path) - assert expected == actual +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.TestIamPermissionsRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) -def test_target_path(): - project = "scallop" - location = "abalone" - target = "squid" - expected = "projects/{project}/locations/{location}/targets/{target}".format( - project=project, - location=location, - target=target, + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/locations/sample2/deliveryPipelines/sample3"}, + request, ) - actual = CloudDeployClient.target_path(project, location, target) - assert expected == actual + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) -def test_parse_target_path(): - expected = { - "project": "clam", - "location": "whelk", - "target": "octopus", + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "resource": "projects/sample1/locations/sample2/deliveryPipelines/sample3" } - path = CloudDeployClient.target_path(**expected) + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = iam_policy_pb2.TestIamPermissionsResponse() - # Check that the path construction is reversible. - actual = CloudDeployClient.parse_target_path(path) - assert expected == actual + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -def test_worker_pool_path(): - project = "oyster" - location = "nudibranch" - worker_pool = "cuttlefish" - expected = ( - "projects/{project}/locations/{location}/workerPools/{worker_pool}".format( - project=project, - location=location, - worker_pool=worker_pool, - ) - ) - actual = CloudDeployClient.worker_pool_path(project, location, worker_pool) - assert expected == actual + response = client.test_iam_permissions(request) + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) -def test_parse_worker_pool_path(): - expected = { - "project": "mussel", - "location": "winkle", - "worker_pool": "nautilus", - } - path = CloudDeployClient.worker_pool_path(**expected) - # Check that the path construction is reversible. - actual = CloudDeployClient.parse_worker_pool_path(path) - assert expected == actual +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) -def test_common_billing_account_path(): - billing_account = "scallop" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - actual = CloudDeployClient.common_billing_account_path(billing_account) - assert expected == actual + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "abalone", - } - path = CloudDeployClient.common_billing_account_path(**expected) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Check that the path construction is reversible. - actual = CloudDeployClient.parse_common_billing_account_path(path) - assert expected == actual + response = client.cancel_operation(request) + # Establish that the response is the type that we expect. 
+ assert response is None -def test_common_folder_path(): - folder = "squid" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = CloudDeployClient.common_folder_path(folder) - assert expected == actual +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) -def test_parse_common_folder_path(): - expected = { - "folder": "clam", - } - path = CloudDeployClient.common_folder_path(**expected) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Check that the path construction is reversible. - actual = CloudDeployClient.parse_common_folder_path(path) - assert expected == actual + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) -def test_common_organization_path(): - organization = "whelk" - expected = "organizations/{organization}".format( - organization=organization, +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - actual = CloudDeployClient.common_organization_path(organization) - assert expected == actual + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" -def test_parse_common_organization_path(): - expected = { - "organization": "octopus", - } - path = CloudDeployClient.common_organization_path(**expected) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Check that the path construction is reversible. - actual = CloudDeployClient.parse_common_organization_path(path) - assert expected == actual + response = client.delete_operation(request) + # Establish that the response is the type that we expect. + assert response is None -def test_common_project_path(): - project = "oyster" - expected = "projects/{project}".format( - project=project, - ) - actual = CloudDeployClient.common_project_path(project) - assert expected == actual +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) -def test_parse_common_project_path(): - expected = { - "project": "nudibranch", - } - path = CloudDeployClient.common_project_path(**expected) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Check that the path construction is reversible. - actual = CloudDeployClient.parse_common_project_path(path) - assert expected == actual + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) -def test_common_location_path(): - project = "cuttlefish" - location = "mussel" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - actual = CloudDeployClient.common_location_path(project, location) - assert expected == actual + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) -def test_parse_common_location_path(): - expected = { - "project": "winkle", - "location": "nautilus", - } - path = CloudDeployClient.common_location_path(**expected) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Check that the path construction is reversible. - actual = CloudDeployClient.parse_common_location_path(path) - assert expected == actual + response = client.get_operation(request) + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - with mock.patch.object( - transports.CloudDeployTransport, "_prep_wrapped_messages" - ) as prep: - client = CloudDeployClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = CloudDeployClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - with mock.patch.object( - transports.CloudDeployTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = CloudDeployClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) -@pytest.mark.asyncio -async def test_transport_close_async(): - client = CloudDeployAsyncClient( + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", + transport="rest", ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) def test_delete_operation(transport: str = "grpc"): @@ -9335,6 +17162,7 @@ async def test_test_iam_permissions_from_dict_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -9352,6 +17180,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: From 6daa092f281568f5e518425061f1082a07edcbc5 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 22 Feb 2023 19:29:28 -0800 Subject: [PATCH 6/6] chore(main): release 1.7.0 (#156) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- CHANGELOG.md | 7 +++++++ google/cloud/deploy/gapic_version.py | 2 +- google/cloud/deploy_v1/gapic_version.py | 2 +- .../snippet_metadata_google.cloud.deploy.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 093be7e..64e0684 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "1.6.1" + ".": "1.7.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 3722e92..9f7f240 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.7.0](https://github.com/googleapis/python-deploy/compare/v1.6.1...v1.7.0) (2023-02-16) + + +### Features + +* Enable "rest" transport in Python for services supporting numeric enums ([#155](https://github.com/googleapis/python-deploy/issues/155)) ([3d6d5fe](https://github.com/googleapis/python-deploy/commit/3d6d5fe5c742361a9b00c4826e98d1d450743931)) + ## [1.6.1](https://github.com/googleapis/python-deploy/compare/v1.6.0...v1.6.1) (2023-01-20) diff --git a/google/cloud/deploy/gapic_version.py b/google/cloud/deploy/gapic_version.py index b4028ab..f033c61 100644 --- 
a/google/cloud/deploy/gapic_version.py +++ b/google/cloud/deploy/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.6.1" # {x-release-please-version} +__version__ = "1.7.0" # {x-release-please-version} diff --git a/google/cloud/deploy_v1/gapic_version.py b/google/cloud/deploy_v1/gapic_version.py index b4028ab..f033c61 100644 --- a/google/cloud/deploy_v1/gapic_version.py +++ b/google/cloud/deploy_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.6.1" # {x-release-please-version} +__version__ = "1.7.0" # {x-release-please-version} diff --git a/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json b/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json index bbadce0..5125d67 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-deploy", - "version": "0.1.0" + "version": "1.7.0" }, "snippets": [ {