From 669d2a985877971fb6c1eb0ad97806fbcfcc7399 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 27 May 2020 22:55:53 -0700 Subject: [PATCH 001/159] feat: add mtls support (#7) --- docs/conf.py | 5 +- .../services/cloud_memcache/client.py | 92 +++++++- .../cloud_memcache/transports/grpc.py | 55 ++++- mypy.ini | 2 +- setup.py | 4 +- synth.metadata | 11 +- .../memcache_v1beta2/test_cloud_memcache.py | 205 +++++++++++++++++- 7 files changed, 338 insertions(+), 36 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index cf879bf..54a5d17 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -38,6 +38,7 @@ "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", + "recommonmark", ] # autodoc/autosummary flags @@ -49,10 +50,6 @@ # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - # The suffix(es) of source filenames. 
# You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index 74c3cc2..19c6bde 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -16,7 +16,8 @@ # from collections import OrderedDict -from typing import Dict, Sequence, Tuple, Type, Union +import re +from typing import Callable, Dict, Sequence, Tuple, Type, Union import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore @@ -90,8 +91,38 @@ class CloudMemcacheClient(metaclass=CloudMemcacheClientMeta): - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached`` """ - DEFAULT_OPTIONS = ClientOptions.ClientOptions( - api_endpoint="memcache.googleapis.com" + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Convert api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "memcache.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT ) @classmethod @@ -121,12 +152,21 @@ def instance_path(project: str, location: str, instance: str) -> str: project=project, location=location, instance=instance ) + @staticmethod + def parse_instance_path(path: str) -> Dict[str, str]: + """Parse a instance path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + def __init__( self, *, credentials: credentials.Credentials = None, transport: Union[str, CloudMemcacheTransport] = None, - client_options: ClientOptions = DEFAULT_OPTIONS, + client_options: ClientOptions = None, ) -> None: """Instantiate the cloud memcache client. @@ -140,6 +180,17 @@ def __init__( transport to use. If set to None, a transport is chosen automatically. client_options (ClientOptions): Custom options for the client. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. + (2) If ``transport`` argument is None, ``client_options`` can be + used to create a mutual TLS transport. If ``client_cert_source`` + is provided, mutual TLS transport will be created with the given + ``api_endpoint`` or the default mTLS endpoint, and the client + SSL credentials obtained from ``client_cert_source``. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
""" if isinstance(client_options, dict): client_options = ClientOptions.from_dict(client_options) @@ -148,17 +199,46 @@ def __init__( # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, CloudMemcacheTransport): + # transport is a CloudMemcacheTransport instance. if credentials: raise ValueError( "When providing a transport instance, " "provide its credentials directly." ) self._transport = transport - else: + elif client_options is None or ( + client_options.api_endpoint is None + and client_options.client_cert_source is None + ): + # Don't trigger mTLS if we get an empty ClientOptions. Transport = type(self).get_transport_class(transport) self._transport = Transport( + credentials=credentials, host=self.DEFAULT_ENDPOINT + ) + else: + # We have a non-empty ClientOptions. If client_cert_source is + # provided, trigger mTLS with user provided endpoint or the default + # mTLS endpoint. 
+ if client_options.client_cert_source: + api_mtls_endpoint = ( + client_options.api_endpoint + if client_options.api_endpoint + else self.DEFAULT_MTLS_ENDPOINT + ) + else: + api_mtls_endpoint = None + + api_endpoint = ( + client_options.api_endpoint + if client_options.api_endpoint + else self.DEFAULT_ENDPOINT + ) + + self._transport = CloudMemcacheGrpcTransport( credentials=credentials, - host=client_options.api_endpoint or "memcache.googleapis.com", + host=api_endpoint, + api_mtls_endpoint=api_mtls_endpoint, + client_cert_source=client_options.client_cert_source, ) def list_instances( diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py index 8c7ffad..0d09f79 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py @@ -15,11 +15,13 @@ # limitations under the License. # -from typing import Callable, Dict +from typing import Callable, Dict, Tuple from google.api_core import grpc_helpers # type: ignore from google.api_core import operations_v1 # type: ignore from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + import grpc # type: ignore @@ -66,7 +68,9 @@ def __init__( *, host: str = "memcache.googleapis.com", credentials: credentials.Credentials = None, - channel: grpc.Channel = None + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None ) -> None: """Instantiate the transport. @@ -80,20 +84,55 @@ def __init__( This argument is ignored if ``channel`` is provided. channel (Optional[grpc.Channel]): A ``Channel`` instance through which to make calls. + api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. 
If + provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A + callback to provide client SSL certificate bytes and private key + bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` + is None. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. """ - # Sanity check: Ensure that channel and credentials are not both - # provided. if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + elif api_mtls_endpoint: + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = grpc_helpers.create_channel( + host, + credentials=credentials, + ssl_credentials=ssl_credentials, + scopes=self.AUTH_SCOPES, + ) + # Run the base constructor. super().__init__(host=host, credentials=credentials) self._stubs = {} # type: Dict[str, Callable] - # If a channel was explicitly provided, set it. 
- if channel: - self._grpc_channel = channel - @classmethod def create_channel( cls, diff --git a/mypy.ini b/mypy.ini index f23e6b5..4505b48 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,3 +1,3 @@ [mypy] -python_version = 3.5 +python_version = 3.6 namespace_packages = True diff --git a/setup.py b/setup.py index ce5edb1..21fc0e3 100644 --- a/setup.py +++ b/setup.py @@ -40,9 +40,7 @@ platforms="Posix; MacOS X; Windows", include_package_data=True, install_requires=( - "google-api-core >= 1.8.0, < 2.0.0dev", - "googleapis-common-protos >= 1.5.8", - "grpcio >= 1.10.0", + "google-api-core[grpc] >= 1.17.0, < 2.0.0dev", "proto-plus >= 0.4.0", ), python_requires=">=3.6", diff --git a/synth.metadata b/synth.metadata index fe8a790..515dba6 100644 --- a/synth.metadata +++ b/synth.metadata @@ -1,13 +1,18 @@ { - "updateTime": "2020-03-25T12:13:46.329877Z", "sources": [ + { + "git": { + "name": ".", + "remote": "https://github.com/googleapis/python-memcache.git", + "sha": "247ad6661e64d32fc4ba83b65b8f1562748dabe0" + } + }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", "sha": "551cf1e6e3addcc63740427c4f9b40dedd3dac27", - "internalRef": "302792195", - "log": "551cf1e6e3addcc63740427c4f9b40dedd3dac27\nfeat: Add OS Config AgentEndpointService v1 PatchJobs and Tasks APIs.\n\nPiperOrigin-RevId: 302792195\n\n1df117114c73299b614dfd3ba3632bf246669336\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 302753982\n\n71d6c56a14bb433beb1237dccb48dabcd9597924\nRefresh monitoring client libraries.\nRename to Cloud Monitoring API.\nAdded support for TimeSeriesQueryLanguageCondition condition type in alert policies.\n\nPiperOrigin-RevId: 302735422\n\n25a1781c096974df99d556cc5888fefa82bc6425\nbazel: migrate all go_gapic_library targets to microgenerator implementation\n\n* update rules_go and gazelle bazel dependencies\n* update gapic-generator bazel dependency (with build file generator changes)\n\nPiperOrigin-RevId: 
302730217\n\n36c0febd0fa7267ab66d14408eec2afd1b6bec4e\nUpdate GAPIC configurations to v2 .yaml.\n\nPiperOrigin-RevId: 302639621\n\n078f222366ed344509a48f2f084944ef61476613\nFix containeranalysis v1beta1 assembly target name\n\nPiperOrigin-RevId: 302529186\n\n0be7105dc52590fa9a24e784052298ae37ce53aa\nAdd BUILD.bazel file to asset/v1p1beta1\n\nPiperOrigin-RevId: 302154871\n\n6c248fd13e8543f8d22cbf118d978301a9fbe2a8\nAdd missing resource annotations and additional_bindings to dialogflow v2 API.\n\nPiperOrigin-RevId: 302063117\n\n9a3a7f33be9eeacf7b3e98435816b7022d206bd7\nChange the service name from \"chromeos-moblab.googleapis.com\" to \"chromeosmoblab.googleapis.com\"\n\nPiperOrigin-RevId: 302060989\n\n98a339237577e3de26cb4921f75fb5c57cc7a19f\nfeat: devtools/build/v1 publish client library config annotations\n\n* add details field to some of the BuildEvents\n* add final_invocation_id and build_tool_exit_code fields to BuildStatus\n\nPiperOrigin-RevId: 302044087\n\ncfabc98c6bbbb22d1aeaf7612179c0be193b3a13\nfeat: home/graph/v1 publish client library config annotations & comment updates\n\nThis change includes adding the client library configuration annotations, updated proto comments, and some client library configuration files.\n\nPiperOrigin-RevId: 302042647\n\nc8c8c0bd15d082db9546253dbaad1087c7a9782c\nchore: use latest gapic-generator in bazel WORKSPACE.\nincluding the following commits from gapic-generator:\n- feat: take source protos in all sub-packages (#3144)\n\nPiperOrigin-RevId: 301843591\n\ne4daf5202ea31cb2cb6916fdbfa9d6bd771aeb4c\nAdd bazel file for v1 client lib generation\n\nPiperOrigin-RevId: 301802926\n\n275fbcce2c900278d487c33293a3c7e1fbcd3a34\nfeat: pubsub/v1 add an experimental filter field to Subscription\n\nPiperOrigin-RevId: 301661567\n\nf2b18cec51d27c999ad30011dba17f3965677e9c\nFix: UpdateBackupRequest.backup is a resource, not a resource reference - remove annotation.\n\nPiperOrigin-RevId: 
301636171\n\n800384063ac93a0cac3a510d41726fa4b2cd4a83\nCloud Billing Budget API v1beta1\nModified api documentation to include warnings about the new filter field.\n\nPiperOrigin-RevId: 301634389\n\n0cc6c146b660db21f04056c3d58a4b752ee445e3\nCloud Billing Budget API v1alpha1\nModified api documentation to include warnings about the new filter field.\n\nPiperOrigin-RevId: 301630018\n\nff2ea00f69065585c3ac0993c8b582af3b6fc215\nFix: Add resource definition for a parent of InspectTemplate which was otherwise missing.\n\nPiperOrigin-RevId: 301623052\n\n55fa441c9daf03173910760191646399338f2b7c\nAdd proto definition for AccessLevel, AccessPolicy, and ServicePerimeter.\n\nPiperOrigin-RevId: 301620844\n\ne7b10591c5408a67cf14ffafa267556f3290e262\nCloud Bigtable Managed Backup service and message proto files.\n\nPiperOrigin-RevId: 301585144\n\nd8e226f702f8ddf92915128c9f4693b63fb8685d\nfeat: Add time-to-live in a queue for builds\n\nPiperOrigin-RevId: 301579876\n\n430375af011f8c7a5174884f0d0e539c6ffa7675\ndocs: add missing closing backtick\n\nPiperOrigin-RevId: 301538851\n\n0e9f1f60ded9ad1c2e725e37719112f5b487ab65\nbazel: Use latest release of gax_java\n\nPiperOrigin-RevId: 301480457\n\n5058c1c96d0ece7f5301a154cf5a07b2ad03a571\nUpdate GAPIC v2 with batching parameters for Logging API\n\nPiperOrigin-RevId: 301443847\n\n64ab9744073de81fec1b3a6a931befc8a90edf90\nFix: Introduce location-based organization/folder/billing-account resources\nChore: Update copyright years\n\nPiperOrigin-RevId: 301373760\n\n23d5f09e670ebb0c1b36214acf78704e2ecfc2ac\nUpdate field_behavior annotations in V1 and V2.\n\nPiperOrigin-RevId: 301337970\n\nb2cf37e7fd62383a811aa4d54d013ecae638851d\nData Catalog V1 API\n\nPiperOrigin-RevId: 301282503\n\n1976b9981e2900c8172b7d34b4220bdb18c5db42\nCloud DLP api update. 
Adds missing fields to Finding and adds support for hybrid jobs.\n\nPiperOrigin-RevId: 301205325\n\nae78682c05e864d71223ce22532219813b0245ac\nfix: several sample code blocks in comments are now properly indented for markdown\n\nPiperOrigin-RevId: 301185150\n\ndcd171d04bda5b67db13049320f97eca3ace3731\nPublish Media Translation API V1Beta1\n\nPiperOrigin-RevId: 301180096\n\nff1713453b0fbc5a7544a1ef6828c26ad21a370e\nAdd protos and BUILD rules for v1 API.\n\nPiperOrigin-RevId: 301179394\n\n8386761d09819b665b6a6e1e6d6ff884bc8ff781\nfeat: chromeos/modlab publish protos and config for Chrome OS Moblab API.\n\nPiperOrigin-RevId: 300843960\n\nb2e2bc62fab90e6829e62d3d189906d9b79899e4\nUpdates to GCS gRPC API spec:\n\n1. Changed GetIamPolicy and TestBucketIamPermissions to use wrapper messages around google.iam.v1 IAM requests messages, and added CommonRequestParams. This lets us support RequesterPays buckets.\n2. Added a metadata field to GetObjectMediaResponse, to support resuming an object media read safely (by extracting the generation of the object being read, and using it in the resumed read request).\n\nPiperOrigin-RevId: 300817706\n\n7fd916ce12335cc9e784bb9452a8602d00b2516c\nAdd deprecated_collections field for backward-compatiblity in PHP and monolith-generated Python and Ruby clients.\n\nGenerate TopicName class in Java which covers the functionality of both ProjectTopicName and DeletedTopicName. 
Introduce breaking changes to be fixed by synth.py.\n\nDelete default retry parameters.\n\nRetry codes defs can be deleted once # https://github.com/googleapis/gapic-generator/issues/3137 is fixed.\n\nPiperOrigin-RevId: 300813135\n\n047d3a8ac7f75383855df0166144f891d7af08d9\nfix!: google/rpc refactor ErrorInfo.type to ErrorInfo.reason and comment updates.\n\nPiperOrigin-RevId: 300773211\n\nfae4bb6d5aac52aabe5f0bb4396466c2304ea6f6\nAdding RetryPolicy to pubsub.proto\n\nPiperOrigin-RevId: 300769420\n\n7d569be2928dbd72b4e261bf9e468f23afd2b950\nAdding additional protocol buffer annotations to v3.\n\nPiperOrigin-RevId: 300718800\n\n13942d1a85a337515040a03c5108993087dc0e4f\nAdd logging protos for Recommender v1.\n\nPiperOrigin-RevId: 300689896\n\na1a573c3eecfe2c404892bfa61a32dd0c9fb22b6\nfix: change go package to use cloud.google.com/go/maps\n\nPiperOrigin-RevId: 300661825\n\nc6fbac11afa0c7ab2972d9df181493875c566f77\nfeat: publish documentai/v1beta2 protos\n\nPiperOrigin-RevId: 300656808\n\n5202a9e0d9903f49e900f20fe5c7f4e42dd6588f\nProtos for v1beta1 release of Cloud Security Center Settings API\n\nPiperOrigin-RevId: 300580858\n\n83518e18655d9d4ac044acbda063cc6ecdb63ef8\nAdds gapic.yaml file and BUILD.bazel file.\n\nPiperOrigin-RevId: 300554200\n\n836c196dc8ef8354bbfb5f30696bd3477e8db5e2\nRegenerate recommender v1beta1 gRPC ServiceConfig file for Insights methods.\n\nPiperOrigin-RevId: 300549302\n\n34a5450c591b6be3d6566f25ac31caa5211b2f3f\nIncreases the default timeout from 20s to 30s for MetricService\n\nPiperOrigin-RevId: 300474272\n\n5d8bffe87cd01ba390c32f1714230e5a95d5991d\nfeat: use the latest gapic-generator in WORKSPACE for bazel build.\n\nPiperOrigin-RevId: 300461878\n\nd631c651e3bcfac5d371e8560c27648f7b3e2364\nUpdated the GAPIC configs to include parameters for Backups APIs.\n\nPiperOrigin-RevId: 300443402\n\n678afc7055c1adea9b7b54519f3bdb228013f918\nAdding Game Servers v1beta API.\n\nPiperOrigin-RevId: 
300433218\n\n80d2bd2c652a5e213302041b0620aff423132589\nEnable proto annotation and gapic v2 for talent API.\n\nPiperOrigin-RevId: 300393997\n\n85e454be7a353f7fe1bf2b0affb753305785b872\ndocs(google/maps/roads): remove mention of nonexported api\n\nPiperOrigin-RevId: 300367734\n\nbf839ae632e0f263a729569e44be4b38b1c85f9c\nAdding protocol buffer annotations and updated config info for v1 and v2.\n\nPiperOrigin-RevId: 300276913\n\n309b899ca18a4c604bce63882a161d44854da549\nPublish `Backup` APIs and protos.\n\nPiperOrigin-RevId: 300246038\n\neced64c3f122421350b4aca68a28e89121d20db8\nadd PHP client libraries\n\nPiperOrigin-RevId: 300193634\n\n7727af0e39df1ae9ad715895c8576d7b65cf6c6d\nfeat: use the latest gapic-generator and protoc-java-resource-name-plugin in googleapis/WORKSPACE.\n\nPiperOrigin-RevId: 300188410\n\n2a25aa351dd5b5fe14895266aff5824d90ce757b\nBreaking change: remove the ProjectOrTenant resource and its references.\n\nPiperOrigin-RevId: 300182152\n\na499dbb28546379415f51803505cfb6123477e71\nUpdate web risk v1 gapic config and BUILD file.\n\nPiperOrigin-RevId: 300152177\n\n52701da10fec2a5f9796e8d12518c0fe574488fe\nFix: apply appropriate namespace/package options for C#, PHP and Ruby.\n\nPiperOrigin-RevId: 300123508\n\n365c029b8cdb63f7751b92ab490f1976e616105c\nAdd CC targets to the kms protos.\n\nThese are needed by go/tink.\n\nPiperOrigin-RevId: 300038469\n\n4ba9aa8a4a1413b88dca5a8fa931824ee9c284e6\nExpose logo recognition API proto for GA.\n\nPiperOrigin-RevId: 299971671\n\n1c9fc2c9e03dadf15f16b1c4f570955bdcebe00e\nAdding ruby_package option to accessapproval.proto for the Ruby client libraries generation.\n\nPiperOrigin-RevId: 299955924\n\n1cc6f0a7bfb147e6f2ede911d9b01e7a9923b719\nbuild(google/maps/routes): generate api clients\n\nPiperOrigin-RevId: 299955905\n\n29a47c965aac79e3fe8e3314482ca0b5967680f0\nIncrease timeout to 1hr for method `dropRange` in bigtable/admin/v2, which is\nsynced with the timeout setting in gapic_yaml.\n\nPiperOrigin-RevId: 
299917154\n\n8f631c4c70a60a9c7da3749511ee4ad432b62898\nbuild(google/maps/roads/v1op): move go to monorepo pattern\n\nPiperOrigin-RevId: 299885195\n\nd66816518844ebbf63504c9e8dfc7133921dd2cd\nbuild(google/maps/roads/v1op): Add bazel build files to generate clients.\n\nPiperOrigin-RevId: 299851148\n\naf7dff701fabe029672168649c62356cf1bb43d0\nAdd LogPlayerReports and LogImpressions to Playable Locations service\n\nPiperOrigin-RevId: 299724050\n\nb6927fca808f38df32a642c560082f5bf6538ced\nUpdate BigQuery Connection API v1beta1 proto: added credential to CloudSqlProperties.\n\nPiperOrigin-RevId: 299503150\n\n91e1fb5ef9829c0c7a64bfa5bde330e6ed594378\nchore: update protobuf (protoc) version to 3.11.2\n\nPiperOrigin-RevId: 299404145\n\n30e36b4bee6749c4799f4fc1a51cc8f058ba167d\nUpdate cloud asset api v1p4beta1.\n\nPiperOrigin-RevId: 299399890\n\nffbb493674099f265693872ae250711b2238090c\nfeat: cloudbuild/v1 add new fields and annotate OUTPUT_OUT fields.\n\nPiperOrigin-RevId: 299397780\n\nbc973a15818e00c19e121959832676e9b7607456\nbazel: Fix broken common dependency\n\nPiperOrigin-RevId: 299397431\n\n71094a343e3b962e744aa49eb9338219537474e4\nchore: bigtable/admin/v2 publish retry config\n\nPiperOrigin-RevId: 299391875\n\n8f488efd7bda33885cb674ddd023b3678c40bd82\nfeat: Migrate logging to GAPIC v2; release new features.\n\nIMPORTANT: This is a breaking change for client libraries\nin all languages.\n\nCommitter: @lukesneeringer, @jskeet\nPiperOrigin-RevId: 299370279\n\n007605bf9ad3a1fd775014ebefbf7f1e6b31ee71\nUpdate API for bigqueryreservation v1beta1.\n- Adds flex capacity commitment plan to CapacityCommitment.\n- Adds methods for getting and updating BiReservations.\n- Adds methods for updating/splitting/merging CapacityCommitments.\n\nPiperOrigin-RevId: 299368059\n\nf0b581b5bdf803e45201ecdb3688b60e381628a8\nfix: recommendationengine/v1beta1 update some comments\n\nPiperOrigin-RevId: 299181282\n\n10e9a0a833dc85ff8f05b2c67ebe5ac785fe04ff\nbuild: add generated BUILD file for 
Routes Preferred API\n\nPiperOrigin-RevId: 299164808\n\n86738c956a8238d7c77f729be78b0ed887a6c913\npublish v1p1beta1: update with absolute address in comments\n\nPiperOrigin-RevId: 299152383\n\n73d9f2ad4591de45c2e1f352bc99d70cbd2a6d95\npublish v1: update with absolute address in comments\n\nPiperOrigin-RevId: 299147194\n\nd2158f24cb77b0b0ccfe68af784c6a628705e3c6\npublish v1beta2: update with absolute address in comments\n\nPiperOrigin-RevId: 299147086\n\n7fca61292c11b4cd5b352cee1a50bf88819dd63b\npublish v1p2beta1: update with absolute address in comments\n\nPiperOrigin-RevId: 299146903\n\n583b7321624736e2c490e328f4b1957335779295\npublish v1p3beta1: update with absolute address in comments\n\nPiperOrigin-RevId: 299146674\n\n638253bf86d1ce1c314108a089b7351440c2f0bf\nfix: add java_multiple_files option for automl text_sentiment.proto\n\nPiperOrigin-RevId: 298971070\n\n373d655703bf914fb8b0b1cc4071d772bac0e0d1\nUpdate Recs AI Beta public bazel file\n\nPiperOrigin-RevId: 298961623\n\ndcc5d00fc8a8d8b56f16194d7c682027b2c66a3b\nfix: add java_multiple_files option for automl classification.proto\n\nPiperOrigin-RevId: 298953301\n\na3f791827266f3496a6a5201d58adc4bb265c2a3\nchore: automl/v1 publish annotations and retry config\n\nPiperOrigin-RevId: 298942178\n\n01c681586d8d6dbd60155289b587aee678530bd9\nMark return_immediately in PullRequest deprecated.\n\nPiperOrigin-RevId: 298893281\n\nc9f5e9c4bfed54bbd09227e990e7bded5f90f31c\nRemove out of date documentation for predicate support on the Storage API\n\nPiperOrigin-RevId: 298883309\n\nfd5b3b8238d783b04692a113ffe07c0363f5de0f\ngenerate webrisk v1 proto\n\nPiperOrigin-RevId: 298847934\n\n" + "internalRef": "302792195" } }, { diff --git a/tests/unit/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/memcache_v1beta2/test_cloud_memcache.py index a93cd40..b7e87f4 100644 --- a/tests/unit/memcache_v1beta2/test_cloud_memcache.py +++ b/tests/unit/memcache_v1beta2/test_cloud_memcache.py @@ -24,6 +24,7 @@ from google import auth from 
google.api_core import client_options from google.api_core import future +from google.api_core import grpc_helpers from google.api_core import operations_v1 from google.auth import credentials from google.cloud.memcache_v1beta2.services.cloud_memcache import CloudMemcacheClient @@ -36,6 +37,39 @@ from google.protobuf import timestamp_pb2 as timestamp # type: ignore +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert CloudMemcacheClient._get_default_mtls_endpoint(None) is None + assert ( + CloudMemcacheClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + CloudMemcacheClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + CloudMemcacheClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CloudMemcacheClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CloudMemcacheClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + def test_cloud_memcache_client_from_service_account_file(): creds = credentials.AnonymousCredentials() with mock.patch.object( @@ -52,28 +86,89 @@ def test_cloud_memcache_client_from_service_account_file(): def test_cloud_memcache_client_client_options(): - # Check the default options have their expected values. - assert CloudMemcacheClient.DEFAULT_OPTIONS.api_endpoint == "memcache.googleapis.com" + # Check that if channel is provided we won't create a new one. 
+ with mock.patch( + "google.cloud.memcache_v1beta2.services.cloud_memcache.CloudMemcacheClient.get_transport_class" + ) as gtc: + transport = transports.CloudMemcacheGrpcTransport( + credentials=credentials.AnonymousCredentials() + ) + client = CloudMemcacheClient(transport=transport) + gtc.assert_not_called() - # Check that options can be customized. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + # Check mTLS is not triggered with empty client options. + options = client_options.ClientOptions() with mock.patch( "google.cloud.memcache_v1beta2.services.cloud_memcache.CloudMemcacheClient.get_transport_class" ) as gtc: transport = gtc.return_value = mock.MagicMock() client = CloudMemcacheClient(client_options=options) - transport.assert_called_once_with(credentials=None, host="squid.clam.whelk") + transport.assert_called_once_with( + credentials=None, host=client.DEFAULT_ENDPOINT + ) + + # Check mTLS is not triggered if api_endpoint is provided but + # client_cert_source is None. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch( + "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = CloudMemcacheClient(client_options=options) + grpc_transport.assert_called_once_with( + api_mtls_endpoint=None, + client_cert_source=None, + credentials=None, + host="squid.clam.whelk", + ) + + # Check mTLS is triggered if client_cert_source is provided. 
+ options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch( + "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = CloudMemcacheClient(client_options=options) + grpc_transport.assert_called_once_with( + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=client_cert_source_callback, + credentials=None, + host=client.DEFAULT_ENDPOINT, + ) + + # Check mTLS is triggered if api_endpoint and client_cert_source are provided. + options = client_options.ClientOptions( + api_endpoint="squid.clam.whelk", client_cert_source=client_cert_source_callback + ) + with mock.patch( + "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = CloudMemcacheClient(client_options=options) + grpc_transport.assert_called_once_with( + api_mtls_endpoint="squid.clam.whelk", + client_cert_source=client_cert_source_callback, + credentials=None, + host="squid.clam.whelk", + ) def test_cloud_memcache_client_client_options_from_dict(): with mock.patch( - "google.cloud.memcache_v1beta2.services.cloud_memcache.CloudMemcacheClient.get_transport_class" - ) as gtc: - transport = gtc.return_value = mock.MagicMock() + "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None client = CloudMemcacheClient( client_options={"api_endpoint": "squid.clam.whelk"} ) - transport.assert_called_once_with(credentials=None, host="squid.clam.whelk") + grpc_transport.assert_called_once_with( + api_mtls_endpoint=None, + client_cert_source=None, + credentials=None, + host="squid.clam.whelk", + ) def test_list_instances(transport: str = "grpc"): @@ -717,8 +812,87 @@ def test_cloud_memcache_host_with_port(): def 
test_cloud_memcache_grpc_transport_channel(): channel = grpc.insecure_channel("http://localhost/") - transport = transports.CloudMemcacheGrpcTransport(channel=channel) - assert transport.grpc_channel is channel + + # Check that if channel is provided, mtls endpoint and client_cert_source + # won't be used. + callback = mock.MagicMock() + transport = transports.CloudMemcacheGrpcTransport( + host="squid.clam.whelk", + channel=channel, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=callback, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert not callback.called + + +@mock.patch("grpc.ssl_channel_credentials", autospec=True) +@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) +def test_cloud_memcache_grpc_transport_channel_mtls_with_client_cert_source( + grpc_create_channel, grpc_ssl_channel_cred +): + # Check that if channel is None, but api_mtls_endpoint and client_cert_source + # are provided, then a mTLS channel will be created. 
+ mock_cred = mock.Mock() + + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + transport = transports.CloudMemcacheGrpcTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + ssl_credentials=mock_ssl_cred, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ) + assert transport.grpc_channel == mock_grpc_channel + + +@pytest.mark.parametrize( + "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] +) +@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) +def test_cloud_memcache_grpc_transport_channel_mtls_with_adc( + grpc_create_channel, api_mtls_endpoint +): + # Check that if channel and client_cert_source are None, but api_mtls_endpoint + # is provided, then a mTLS channel will be created with SSL ADC. + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + # Mock google.auth.transport.grpc.SslCredentials class. 
+ mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + mock_cred = mock.Mock() + transport = transports.CloudMemcacheGrpcTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint=api_mtls_endpoint, + client_cert_source=None, + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + ssl_credentials=mock_ssl_cred, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ) + assert transport.grpc_channel == mock_grpc_channel def test_cloud_memcache_grpc_lro_client(): @@ -744,3 +918,12 @@ def test_instance_path(): ) actual = CloudMemcacheClient.instance_path(project, location, instance) assert expected == actual + + +def test_parse_instance_path(): + expected = {"project": "octopus", "location": "oyster", "instance": "nudibranch"} + path = CloudMemcacheClient.instance_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudMemcacheClient.parse_instance_path(path) + assert expected == actual From e34e2b1a2b47476cb6a0dcd932dcfd030018936f Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 27 May 2020 23:04:19 -0700 Subject: [PATCH 002/159] docs: add multiprocessing (#8) --- .flake8 | 16 ++++++++++++++++ .github/ISSUE_TEMPLATE/bug_report.md | 3 +-- .kokoro/publish-docs.sh | 2 -- .kokoro/release.sh | 2 -- CONTRIBUTING.rst | 15 +++------------ MANIFEST.in | 16 ++++++++++++++++ docs/index.rst | 2 ++ docs/multiprocessing.rst | 7 +++++++ noxfile.py | 5 ++--- setup.cfg | 16 ++++++++++++++++ synth.metadata | 2 +- 11 files changed, 64 insertions(+), 22 deletions(-) create mode 100644 docs/multiprocessing.rst diff --git a/.flake8 b/.flake8 index 9f96f02..ac68f2a 100644 --- a/.flake8 +++ b/.flake8 @@ -1,3 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # Generated by synthtool. DO NOT EDIT! [flake8] ignore = E203, E266, E501, W503, F401, F841, E712 diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 3f62537..3f5c313 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -11,8 +11,7 @@ Thanks for stopping by to let us know something could be better! 
Please run down the following list and make sure you've tried the usual "quick fixes": - Search the issues already opened: https://github.com/googleapis/python-memcache/issues - - Search the issues on our "catch-all" repository: https://github.com/googleapis/google-cloud-python - - Search StackOverflow: http://stackoverflow.com/questions/tagged/google-cloud-platform+python + - Search StackOverflow: https://stackoverflow.com/questions/tagged/google-cloud-platform+python If you are still having issues, please be sure to include as much information as possible: diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh index 839e1bc..2bb90a9 100755 --- a/.kokoro/publish-docs.sh +++ b/.kokoro/publish-docs.sh @@ -13,8 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -#!/bin/bash - set -eo pipefail # Disable buffering, so that the logs stream through. diff --git a/.kokoro/release.sh b/.kokoro/release.sh index a1b4b89..ee38821 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -13,8 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -#!/bin/bash - set -eo pipefail # Start the releasetool reporter diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 69e7e34..8577546 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: 2.7, - 3.5, 3.6, and 3.7 on both UNIX and Windows. + 3.5, 3.6, 3.7 and 3.8 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -214,26 +214,18 @@ We support: - `Python 3.5`_ - `Python 3.6`_ - `Python 3.7`_ +- `Python 3.8`_ .. _Python 3.5: https://docs.python.org/3.5/ .. _Python 3.6: https://docs.python.org/3.6/ .. _Python 3.7: https://docs.python.org/3.7/ +.. 
_Python 3.8: https://docs.python.org/3.8/ Supported versions can be found in our ``noxfile.py`` `config`_. .. _config: https://github.com/googleapis/python-memcache/blob/master/noxfile.py -We explicitly decided not to support `Python 2.5`_ due to `decreased usage`_ -and lack of continuous integration `support`_. - -.. _Python 2.5: https://docs.python.org/2.5/ -.. _decreased usage: https://caremad.io/2013/10/a-look-at-pypi-downloads/ -.. _support: https://blog.travis-ci.com/2013-11-18-upcoming-build-environment-updates/ - -We have `dropped 2.6`_ as a supported version as well since Python 2.6 is no -longer supported by the core development team. - Python 2.7 support is deprecated. All code changes should maintain Python 2.7 compatibility until January 1, 2020. We also explicitly decided to support Python 3 beginning with version @@ -247,7 +239,6 @@ We also explicitly decided to support Python 3 beginning with version .. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django .. _projects: http://flask.pocoo.org/docs/0.10/python3/ .. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/ -.. _dropped 2.6: https://github.com/googleapis/google-cloud-python/issues/995 ********** Versioning diff --git a/MANIFEST.in b/MANIFEST.in index cd011be..68855ab 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,3 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ # Generated by synthtool. DO NOT EDIT! include README.rst LICENSE recursive-include google *.json *.proto diff --git a/docs/index.rst b/docs/index.rst index 1d117ce..221e3bf 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,5 +1,7 @@ .. include:: README.rst +.. include:: multiprocessing.rst + API Reference ------------- diff --git a/docs/multiprocessing.rst b/docs/multiprocessing.rst new file mode 100644 index 0000000..1cb29d4 --- /dev/null +++ b/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpcio` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.Pool` or + :class:`multiprocessing.Process`. diff --git a/noxfile.py b/noxfile.py index 85e80db..efb6b03 100644 --- a/noxfile.py +++ b/noxfile.py @@ -110,8 +110,7 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. - session.install("mock", "pytest") - + session.install("mock", "pytest", "google-cloud-testutils") session.install("-e", ".") # Run py.test against the system tests. @@ -139,7 +138,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx", "alabaster", "recommonmark") + session.install("sphinx<3.0.0", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/setup.cfg b/setup.cfg index 3bd5555..c3a2b39 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,3 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # Generated by synthtool. DO NOT EDIT! [bdist_wheel] universal = 1 diff --git a/synth.metadata b/synth.metadata index 515dba6..43c1e10 100644 --- a/synth.metadata +++ b/synth.metadata @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "ee4330a0e5f4b93978e8683fbda8e6d4148326b7" + "sha": "d2364eb80b840a36136c8ce12f1c6efabcc9600e" } } ], From 2ec9b3d1364c6c5886fc263e48f7c64d130546f1 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 28 May 2020 09:44:58 -0700 Subject: [PATCH 003/159] chore: use latest sphinx (#10) --- noxfile.py | 2 +- synth.metadata | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/noxfile.py b/noxfile.py index efb6b03..8563032 100644 --- a/noxfile.py +++ b/noxfile.py @@ -138,7 +138,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx<3.0.0", "alabaster", "recommonmark") + session.install("sphinx", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/synth.metadata b/synth.metadata index 43c1e10..55c0f24 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,22 +4,22 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-memcache.git", - "sha": "247ad6661e64d32fc4ba83b65b8f1562748dabe0" + "sha": "e34e2b1a2b47476cb6a0dcd932dcfd030018936f" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "551cf1e6e3addcc63740427c4f9b40dedd3dac27", - 
"internalRef": "302792195" + "sha": "eafa840ceec23b44a5c21670288107c661252711", + "internalRef": "313488995" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "d2364eb80b840a36136c8ce12f1c6efabcc9600e" + "sha": "71b8a272549c06b5768d00fa48d3ae990e871bec" } } ], From bec03204329a93e45a718d78859b2e1e0a57be5f Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 28 May 2020 23:58:10 +0000 Subject: [PATCH 004/159] chore: release 0.2.0 (#9) :robot: I have created a release \*beep\* \*boop\* --- ## [0.2.0](https://www.github.com/googleapis/python-memcache/compare/v0.1.0...v0.2.0) (2020-05-28) ### Features * add mtls support ([#7](https://www.github.com/googleapis/python-memcache/issues/7)) ([669d2a9](https://www.github.com/googleapis/python-memcache/commit/669d2a985877971fb6c1eb0ad97806fbcfcc7399)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). 
--- CHANGELOG.md | 7 +++++++ setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9d26d89..e4f20fc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.2.0](https://www.github.com/googleapis/python-memcache/compare/v0.1.0...v0.2.0) (2020-05-28) + + +### Features + +* add mtls support ([#7](https://www.github.com/googleapis/python-memcache/issues/7)) ([669d2a9](https://www.github.com/googleapis/python-memcache/commit/669d2a985877971fb6c1eb0ad97806fbcfcc7399)) + ## 0.1.0 (2020-03-03) diff --git a/setup.py b/setup.py index 21fc0e3..31d56ba 100644 --- a/setup.py +++ b/setup.py @@ -19,7 +19,7 @@ import os import setuptools # type: ignore -version = "0.1.0" +version = "0.2.0" package_root = os.path.abspath(os.path.dirname(__file__)) From 63d578c2938fcb8dc1442eca2d33129b5d3c237d Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 22 Sep 2020 16:14:21 -0600 Subject: [PATCH 005/159] chore: add default CODEOWNERS (#17) --- .github/CODEOWNERS | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 .github/CODEOWNERS diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000..30c3973 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,11 @@ +# Code owners file. +# This file controls who is tagged for review for any given pull request. 
+# +# For syntax help see: +# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax + +# The @googleapis/yoshi-python is the default owner for changes in this repo +* @googleapis/yoshi-python + +# The python-samples-reviewers team is the default owner for samples changes +/samples/ @googleapis/python-samples-owners \ No newline at end of file From 0bbc337594e2a44c51a5b372670d72499592e2e0 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 28 Dec 2020 10:00:09 -0800 Subject: [PATCH 006/159] feat: add async client (#26) --- .flake8 | 5 +- .github/snippet-bot.yml | 0 .gitignore | 7 +- .kokoro/build.sh | 8 +- .kokoro/docker/docs/Dockerfile | 98 + .kokoro/docker/docs/fetch_gpg_keys.sh | 45 + .kokoro/docs/common.cfg | 21 +- .kokoro/docs/docs-presubmit.cfg | 17 + .kokoro/populate-secrets.sh | 43 + .kokoro/publish-docs.sh | 39 +- .kokoro/release/common.cfg | 50 +- .kokoro/samples/lint/common.cfg | 34 + .kokoro/samples/lint/continuous.cfg | 6 + .kokoro/samples/lint/periodic.cfg | 6 + .kokoro/samples/lint/presubmit.cfg | 6 + .kokoro/samples/python3.6/common.cfg | 40 + .kokoro/samples/python3.6/continuous.cfg | 7 + .kokoro/samples/python3.6/periodic.cfg | 6 + .kokoro/samples/python3.6/presubmit.cfg | 6 + .kokoro/samples/python3.7/common.cfg | 40 + .kokoro/samples/python3.7/continuous.cfg | 6 + .kokoro/samples/python3.7/periodic.cfg | 6 + .kokoro/samples/python3.7/presubmit.cfg | 6 + .kokoro/samples/python3.8/common.cfg | 40 + .kokoro/samples/python3.8/continuous.cfg | 6 + .kokoro/samples/python3.8/periodic.cfg | 6 + .kokoro/samples/python3.8/presubmit.cfg | 6 + .kokoro/test-samples.sh | 110 + .kokoro/trampoline.sh | 15 +- .kokoro/trampoline_v2.sh | 487 ++++ .pre-commit-config.yaml | 17 + .trampolinerc | 51 + CODE_OF_CONDUCT.md | 123 +- CONTRIBUTING.rst | 40 +- MANIFEST.in | 3 + docs/_templates/layout.html | 4 +- docs/conf.py | 25 +- docs/memcache_v1beta2/services.rst | 6 +- docs/memcache_v1beta2/types.rst 
| 5 +- google/cloud/memcache/__init__.py | 5 +- google/cloud/memcache_v1beta2/__init__.py | 1 - .../services/cloud_memcache/__init__.py | 6 +- .../services/cloud_memcache/async_client.py | 844 ++++++ .../services/cloud_memcache/client.py | 450 ++- .../services/cloud_memcache/pagers.py | 79 +- .../cloud_memcache/transports/__init__.py | 9 +- .../cloud_memcache/transports/base.py | 143 +- .../cloud_memcache/transports/grpc.py | 167 +- .../cloud_memcache/transports/grpc_asyncio.py | 486 ++++ .../cloud/memcache_v1beta2/types/__init__.py | 3 +- .../memcache_v1beta2/types/cloud_memcache.py | 86 +- noxfile.py | 81 +- scripts/decrypt-secrets.sh | 46 + scripts/fixup_memcache_v1beta2_keywords.py | 185 ++ scripts/readme-gen/readme_gen.py | 66 + scripts/readme-gen/templates/README.tmpl.rst | 87 + scripts/readme-gen/templates/auth.tmpl.rst | 9 + .../templates/auth_api_key.tmpl.rst | 14 + .../templates/install_deps.tmpl.rst | 29 + .../templates/install_portaudio.tmpl.rst | 35 + setup.py | 4 +- synth.metadata | 15 +- synth.py | 49 +- testing/.gitignore | 3 + tests/unit/gapic/memcache_v1beta2/__init__.py | 1 + .../memcache_v1beta2/test_cloud_memcache.py | 2529 +++++++++++++++++ .../memcache_v1beta2/test_cloud_memcache.py | 929 ------ 67 files changed, 6437 insertions(+), 1370 deletions(-) create mode 100644 .github/snippet-bot.yml create mode 100644 .kokoro/docker/docs/Dockerfile create mode 100755 .kokoro/docker/docs/fetch_gpg_keys.sh create mode 100644 .kokoro/docs/docs-presubmit.cfg create mode 100755 .kokoro/populate-secrets.sh create mode 100644 .kokoro/samples/lint/common.cfg create mode 100644 .kokoro/samples/lint/continuous.cfg create mode 100644 .kokoro/samples/lint/periodic.cfg create mode 100644 .kokoro/samples/lint/presubmit.cfg create mode 100644 .kokoro/samples/python3.6/common.cfg create mode 100644 .kokoro/samples/python3.6/continuous.cfg create mode 100644 .kokoro/samples/python3.6/periodic.cfg create mode 100644 .kokoro/samples/python3.6/presubmit.cfg create mode 
100644 .kokoro/samples/python3.7/common.cfg create mode 100644 .kokoro/samples/python3.7/continuous.cfg create mode 100644 .kokoro/samples/python3.7/periodic.cfg create mode 100644 .kokoro/samples/python3.7/presubmit.cfg create mode 100644 .kokoro/samples/python3.8/common.cfg create mode 100644 .kokoro/samples/python3.8/continuous.cfg create mode 100644 .kokoro/samples/python3.8/periodic.cfg create mode 100644 .kokoro/samples/python3.8/presubmit.cfg create mode 100755 .kokoro/test-samples.sh create mode 100755 .kokoro/trampoline_v2.sh create mode 100644 .pre-commit-config.yaml create mode 100644 .trampolinerc create mode 100644 google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py create mode 100644 google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py create mode 100755 scripts/decrypt-secrets.sh create mode 100644 scripts/fixup_memcache_v1beta2_keywords.py create mode 100644 scripts/readme-gen/readme_gen.py create mode 100644 scripts/readme-gen/templates/README.tmpl.rst create mode 100644 scripts/readme-gen/templates/auth.tmpl.rst create mode 100644 scripts/readme-gen/templates/auth_api_key.tmpl.rst create mode 100644 scripts/readme-gen/templates/install_deps.tmpl.rst create mode 100644 scripts/readme-gen/templates/install_portaudio.tmpl.rst create mode 100644 testing/.gitignore create mode 100644 tests/unit/gapic/memcache_v1beta2/__init__.py create mode 100644 tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py delete mode 100644 tests/unit/memcache_v1beta2/test_cloud_memcache.py diff --git a/.flake8 b/.flake8 index ac68f2a..29227d4 100644 --- a/.flake8 +++ b/.flake8 @@ -16,14 +16,17 @@ # Generated by synthtool. DO NOT EDIT! [flake8] -ignore = E203, E266, E501, W503, F401, F841, E712 +ignore = E203, E266, E501, W503 exclude = # Exclude generated code. **/proto/** **/gapic/** + **/services/** + **/types/** *_pb2.py # Standard linting exemptions. 
+ **/.nox/** __pycache__, .git, *.pyc, diff --git a/.github/snippet-bot.yml b/.github/snippet-bot.yml new file mode 100644 index 0000000..e69de29 diff --git a/.gitignore b/.gitignore index df79b14..b9daa52 100644 --- a/.gitignore +++ b/.gitignore @@ -10,6 +10,7 @@ dist build eggs +.eggs parts bin var @@ -44,15 +45,17 @@ pip-log.txt # Built documentation docs/_build -htmlcov +bigquery/docs/generated +docs.metadata # Virtual environment env/ coverage.xml +sponge_log.xml # System test environment variables. system_tests/local_test_setup # Make sure a generated file isn't accidentally committed. pylintrc -pylintrc.test \ No newline at end of file +pylintrc.test diff --git a/.kokoro/build.sh b/.kokoro/build.sh index 6c21d00..b96af36 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -36,4 +36,10 @@ python3.6 -m pip uninstall --yes --quiet nox-automation python3.6 -m pip install --upgrade --quiet nox python3.6 -m nox --version -python3.6 -m nox +# If NOX_SESSION is set, it only runs the specified session, +# otherwise run all the sessions. +if [[ -n "${NOX_SESSION:-}" ]]; then + python3.6 -m nox -s "${NOX_SESSION:-}" +else + python3.6 -m nox +fi diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile new file mode 100644 index 0000000..412b0b5 --- /dev/null +++ b/.kokoro/docker/docs/Dockerfile @@ -0,0 +1,98 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from ubuntu:20.04 + +ENV DEBIAN_FRONTEND noninteractive + +# Ensure local Python is preferred over distribution Python. +ENV PATH /usr/local/bin:$PATH + +# Install dependencies. +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + apt-transport-https \ + build-essential \ + ca-certificates \ + curl \ + dirmngr \ + git \ + gpg-agent \ + graphviz \ + libbz2-dev \ + libdb5.3-dev \ + libexpat1-dev \ + libffi-dev \ + liblzma-dev \ + libreadline-dev \ + libsnappy-dev \ + libssl-dev \ + libsqlite3-dev \ + portaudio19-dev \ + redis-server \ + software-properties-common \ + ssh \ + sudo \ + tcl \ + tcl-dev \ + tk \ + tk-dev \ + uuid-dev \ + wget \ + zlib1g-dev \ + && add-apt-repository universe \ + && apt-get update \ + && apt-get -y install jq \ + && apt-get clean autoclean \ + && apt-get autoremove -y \ + && rm -rf /var/lib/apt/lists/* \ + && rm -f /var/cache/apt/archives/*.deb + + +COPY fetch_gpg_keys.sh /tmp +# Install the desired versions of Python. +RUN set -ex \ + && export GNUPGHOME="$(mktemp -d)" \ + && echo "disable-ipv6" >> "${GNUPGHOME}/dirmngr.conf" \ + && /tmp/fetch_gpg_keys.sh \ + && for PYTHON_VERSION in 3.7.8 3.8.5; do \ + wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz" \ + && wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc" \ + && gpg --batch --verify python-${PYTHON_VERSION}.tar.xz.asc python-${PYTHON_VERSION}.tar.xz \ + && rm -r python-${PYTHON_VERSION}.tar.xz.asc \ + && mkdir -p /usr/src/python-${PYTHON_VERSION} \ + && tar -xJC /usr/src/python-${PYTHON_VERSION} --strip-components=1 -f python-${PYTHON_VERSION}.tar.xz \ + && rm python-${PYTHON_VERSION}.tar.xz \ + && cd /usr/src/python-${PYTHON_VERSION} \ + && ./configure \ + --enable-shared \ + # This works only on Python 2.7 and throws a warning on every other + # 
version, but seems otherwise harmless. + --enable-unicode=ucs4 \ + --with-system-ffi \ + --without-ensurepip \ + && make -j$(nproc) \ + && make install \ + && ldconfig \ + ; done \ + && rm -rf "${GNUPGHOME}" \ + && rm -rf /usr/src/python* \ + && rm -rf ~/.cache/ + +RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ + && python3.7 /tmp/get-pip.py \ + && python3.8 /tmp/get-pip.py \ + && rm /tmp/get-pip.py + +CMD ["python3.7"] diff --git a/.kokoro/docker/docs/fetch_gpg_keys.sh b/.kokoro/docker/docs/fetch_gpg_keys.sh new file mode 100755 index 0000000..d653dd8 --- /dev/null +++ b/.kokoro/docker/docs/fetch_gpg_keys.sh @@ -0,0 +1,45 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# A script to fetch gpg keys with retry. +# Avoid jinja parsing the file. +# + +function retry { + if [[ "${#}" -le 1 ]]; then + echo "Usage: ${0} retry_count commands.." + exit 1 + fi + local retries=${1} + local command="${@:2}" + until [[ "${retries}" -le 0 ]]; do + $command && return 0 + if [[ $? 
-ne 0 ]]; then + echo "command failed, retrying" + ((retries--)) + fi + done + return 1 +} + +# 3.6.9, 3.7.5 (Ned Deily) +retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \ + 0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D + +# 3.8.0 (Ɓukasz Langa) +retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \ + E3FF2839C048B25C084DEBE9B26995E310250568 + +# diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg index 2054a56..6197ce8 100644 --- a/.kokoro/docs/common.cfg +++ b/.kokoro/docs/common.cfg @@ -11,12 +11,12 @@ action { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-memcache/.kokoro/trampoline.sh" +build_file: "python-memcache/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" + value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs" } env_vars: { key: "TRAMPOLINE_BUILD_FILE" @@ -28,6 +28,23 @@ env_vars: { value: "docs-staging" } +env_vars: { + key: "V2_STAGING_BUCKET" + value: "docs-staging-v2" +} + +# It will upload the docker image after successful builds. +env_vars: { + key: "TRAMPOLINE_IMAGE_UPLOAD" + value: "true" +} + +# It will always build the docker image. +env_vars: { + key: "TRAMPOLINE_DOCKERFILE" + value: ".kokoro/docker/docs/Dockerfile" +} + # Fetch the token needed for reporting release status to GitHub before_action { fetch_keystore { diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg new file mode 100644 index 0000000..1118107 --- /dev/null +++ b/.kokoro/docs/docs-presubmit.cfg @@ -0,0 +1,17 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "STAGING_BUCKET" + value: "gcloud-python-test" +} + +env_vars: { + key: "V2_STAGING_BUCKET" + value: "gcloud-python-test" +} + +# We only upload the image in the main `docs` build. 
+env_vars: { + key: "TRAMPOLINE_IMAGE_UPLOAD" + value: "false" +} diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh new file mode 100755 index 0000000..f525142 --- /dev/null +++ b/.kokoro/populate-secrets.sh @@ -0,0 +1,43 @@ +#!/bin/bash +# Copyright 2020 Google LLC. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;} +function msg { println "$*" >&2 ;} +function println { printf '%s\n' "$(now) $*" ;} + + +# Populates requested secrets set in SECRET_MANAGER_KEYS from service account: +# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com +SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" +msg "Creating folder on disk for secrets: ${SECRET_LOCATION}" +mkdir -p ${SECRET_LOCATION} +for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g") +do + msg "Retrieving secret ${key}" + docker run --entrypoint=gcloud \ + --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \ + gcr.io/google.com/cloudsdktool/cloud-sdk \ + secrets versions access latest \ + --project cloud-devrel-kokoro-resources \ + --secret ${key} > \ + "${SECRET_LOCATION}/${key}" + if [[ $? 
== 0 ]]; then + msg "Secret written to ${SECRET_LOCATION}/${key}" + else + msg "Error retrieving secret ${key}" + fi +done diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh index 2bb90a9..8acb14e 100755 --- a/.kokoro/publish-docs.sh +++ b/.kokoro/publish-docs.sh @@ -18,26 +18,16 @@ set -eo pipefail # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 -cd github/python-memcache - -# Remove old nox -python3.6 -m pip uninstall --yes --quiet nox-automation +export PATH="${HOME}/.local/bin:${PATH}" # Install nox -python3.6 -m pip install --upgrade --quiet nox -python3.6 -m nox --version +python3 -m pip install --user --upgrade --quiet nox +python3 -m nox --version # build docs nox -s docs -python3 -m pip install gcp-docuploader - -# install a json parser -sudo apt-get update -sudo apt-get -y install software-properties-common -sudo add-apt-repository universe -sudo apt-get update -sudo apt-get -y install jq +python3 -m pip install --user gcp-docuploader # create metadata python3 -m docuploader create-metadata \ @@ -52,4 +42,23 @@ python3 -m docuploader create-metadata \ cat docs.metadata # upload docs -python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket docs-staging +python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" + + +# docfx yaml files +nox -s docfx + +# create metadata. 
+python3 -m docuploader create-metadata \ + --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ + --version=$(python3 setup.py --version) \ + --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ + --distribution-name=$(python3 setup.py --name) \ + --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ + --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ + --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) + +cat docs.metadata + +# upload docs +python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index 858e856..b4b40c4 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -23,42 +23,18 @@ env_vars: { value: "github/python-memcache/.kokoro/release.sh" } -# Fetch the token needed for reporting release status to GitHub -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "yoshi-automation-github-key" - } - } -} - -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google_cloud_pypi_password" - } - } -} - -# Fetch magictoken to use with Magic Github Proxy -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "releasetool-magictoken" - } - } +# Fetch PyPI password +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google_cloud_pypi_password" + } + } } -# Fetch api key to use with Magic Github Proxy -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "magic-github-proxy-api-key" - } - } -} +# Tokens needed to report release status back to GitHub +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: 
"releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" +} \ No newline at end of file diff --git a/.kokoro/samples/lint/common.cfg b/.kokoro/samples/lint/common.cfg new file mode 100644 index 0000000..31f0ad7 --- /dev/null +++ b/.kokoro/samples/lint/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "lint" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-memcache/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-memcache/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/.kokoro/samples/lint/continuous.cfg b/.kokoro/samples/lint/continuous.cfg new file mode 100644 index 0000000..a1c8d97 --- /dev/null +++ b/.kokoro/samples/lint/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/lint/periodic.cfg b/.kokoro/samples/lint/periodic.cfg new file mode 100644 index 0000000..50fec96 --- /dev/null +++ b/.kokoro/samples/lint/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} \ No newline at end of file diff --git a/.kokoro/samples/lint/presubmit.cfg b/.kokoro/samples/lint/presubmit.cfg new file mode 100644 index 0000000..a1c8d97 --- /dev/null +++ b/.kokoro/samples/lint/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg new file mode 100644 index 0000000..bfd6190 --- /dev/null +++ b/.kokoro/samples/python3.6/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.6" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py36" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-memcache/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. 
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-memcache/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.6/continuous.cfg b/.kokoro/samples/python3.6/continuous.cfg new file mode 100644 index 0000000..7218af1 --- /dev/null +++ b/.kokoro/samples/python3.6/continuous.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + diff --git a/.kokoro/samples/python3.6/periodic.cfg b/.kokoro/samples/python3.6/periodic.cfg new file mode 100644 index 0000000..50fec96 --- /dev/null +++ b/.kokoro/samples/python3.6/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.6/presubmit.cfg b/.kokoro/samples/python3.6/presubmit.cfg new file mode 100644 index 0000000..a1c8d97 --- /dev/null +++ b/.kokoro/samples/python3.6/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg new file mode 100644 index 0000000..a7be3aa --- /dev/null +++ b/.kokoro/samples/python3.7/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.7" 
+} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py37" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-memcache/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-memcache/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.7/continuous.cfg b/.kokoro/samples/python3.7/continuous.cfg new file mode 100644 index 0000000..a1c8d97 --- /dev/null +++ b/.kokoro/samples/python3.7/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.7/periodic.cfg b/.kokoro/samples/python3.7/periodic.cfg new file mode 100644 index 0000000..50fec96 --- /dev/null +++ b/.kokoro/samples/python3.7/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.7/presubmit.cfg b/.kokoro/samples/python3.7/presubmit.cfg new file mode 100644 index 0000000..a1c8d97 --- /dev/null +++ b/.kokoro/samples/python3.7/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg new file mode 100644 index 
0000000..ec8abd3 --- /dev/null +++ b/.kokoro/samples/python3.8/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.8" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py38" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-memcache/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-memcache/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.8/continuous.cfg b/.kokoro/samples/python3.8/continuous.cfg new file mode 100644 index 0000000..a1c8d97 --- /dev/null +++ b/.kokoro/samples/python3.8/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.8/periodic.cfg b/.kokoro/samples/python3.8/periodic.cfg new file mode 100644 index 0000000..50fec96 --- /dev/null +++ b/.kokoro/samples/python3.8/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.8/presubmit.cfg b/.kokoro/samples/python3.8/presubmit.cfg new file mode 100644 index 0000000..a1c8d97 --- /dev/null +++ b/.kokoro/samples/python3.8/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh new file mode 100755 index 0000000..f2285be --- /dev/null +++ b/.kokoro/test-samples.sh @@ -0,0 +1,110 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to the rightmost command to exit with a non-zero status +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +cd github/python-memcache + +# Run periodic samples tests at latest release +if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + LATEST_RELEASE=$(git describe --abbrev=0 --tags) + git checkout $LATEST_RELEASE +fi + +# Exit early if samples directory doesn't exist +if [ ! -d "./samples" ]; then + echo "No tests run. `./samples` not found" + exit 0 +fi + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +# Debug: show build environment +env | grep KOKORO + +# Install nox +python3.6 -m pip install --upgrade --quiet nox + +# Use secrets accessor service account to get secrets +if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then + gcloud auth activate-service-account \ + --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ + --project="cloud-devrel-kokoro-resources" +fi + +# This script will create 3 files: +# - testing/test-env.sh +# - testing/service-account.json +# - testing/client-secrets.json +./scripts/decrypt-secrets.sh + +source ./testing/test-env.sh +export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json + +# For cloud-run session, we activate the service account for gcloud sdk. +gcloud auth activate-service-account \ + --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" + +export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json + +echo -e "\n******************** TESTING PROJECTS ********************" + +# Switch to 'fail at end' to allow all tests to complete before exiting. +set +e +# Use RTN to return a non-zero value if the test fails. +RTN=0 +ROOT=$(pwd) +# Find all requirements.txt in the samples directory (may break on whitespace). 
+for file in samples/**/requirements.txt; do + cd "$ROOT" + # Navigate to the project folder. + file=$(dirname "$file") + cd "$file" + + echo "------------------------------------------------------------" + echo "- testing $file" + echo "------------------------------------------------------------" + + # Use nox to execute the tests for the project. + python3.6 -m nox -s "$RUN_TESTS_SESSION" + EXIT=$? + + # If this is a periodic build, send the test log to the Build Cop Bot. + # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop. + if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop + $KOKORO_GFILE_DIR/linux_amd64/buildcop + fi + + if [[ $EXIT -ne 0 ]]; then + RTN=1 + echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" + else + echo -e "\n Testing completed.\n" + fi + +done +cd "$ROOT" + +# Workaround for Kokoro permissions issue: delete secrets +rm testing/{test-env.sh,client-secrets.json,service-account.json} + +exit "$RTN" diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh index e8c4251..f39236e 100755 --- a/.kokoro/trampoline.sh +++ b/.kokoro/trampoline.sh @@ -15,9 +15,14 @@ set -eo pipefail -python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" || ret_code=$? +# Always run the cleanup script, regardless of the success of bouncing into +# the container. +function cleanup() { + chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh + ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh + echo "cleanup"; +} +trap cleanup EXIT -chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh -${KOKORO_GFILE_DIR}/trampoline_cleanup.sh || true - -exit ${ret_code} +$(dirname $0)/populate-secrets.sh # Secret Manager secrets. 
+python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" \ No newline at end of file diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh new file mode 100755 index 0000000..719bcd5 --- /dev/null +++ b/.kokoro/trampoline_v2.sh @@ -0,0 +1,487 @@ +#!/usr/bin/env bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# trampoline_v2.sh +# +# This script does 3 things. +# +# 1. Prepare the Docker image for the test +# 2. Run the Docker with appropriate flags to run the test +# 3. Upload the newly built Docker image +# +# in a way that is somewhat compatible with trampoline_v1. +# +# To run this script, first download few files from gcs to /dev/shm. +# (/dev/shm is passed into the container as KOKORO_GFILE_DIR). +# +# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm +# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm +# +# Then run the script. +# .kokoro/trampoline_v2.sh +# +# These environment variables are required: +# TRAMPOLINE_IMAGE: The docker image to use. +# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile. +# +# You can optionally change these environment variables: +# TRAMPOLINE_IMAGE_UPLOAD: +# (true|false): Whether to upload the Docker image after the +# successful builds. +# TRAMPOLINE_BUILD_FILE: The script to run in the docker container. +# TRAMPOLINE_WORKSPACE: The workspace path in the docker container. 
+# Defaults to /workspace. +# Potentially there are some repo specific envvars in .trampolinerc in +# the project root. + + +set -euo pipefail + +TRAMPOLINE_VERSION="2.0.5" + +if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then + readonly IO_COLOR_RED="$(tput setaf 1)" + readonly IO_COLOR_GREEN="$(tput setaf 2)" + readonly IO_COLOR_YELLOW="$(tput setaf 3)" + readonly IO_COLOR_RESET="$(tput sgr0)" +else + readonly IO_COLOR_RED="" + readonly IO_COLOR_GREEN="" + readonly IO_COLOR_YELLOW="" + readonly IO_COLOR_RESET="" +fi + +function function_exists { + [ $(LC_ALL=C type -t $1)"" == "function" ] +} + +# Logs a message using the given color. The first argument must be one +# of the IO_COLOR_* variables defined above, such as +# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the +# given color. The log message will also have an RFC-3339 timestamp +# prepended (in UTC). You can disable the color output by setting +# TERM=vt100. +function log_impl() { + local color="$1" + shift + local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")" + echo "================================================================" + echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}" + echo "================================================================" +} + +# Logs the given message with normal coloring and a timestamp. +function log() { + log_impl "${IO_COLOR_RESET}" "$@" +} + +# Logs the given message in green with a timestamp. +function log_green() { + log_impl "${IO_COLOR_GREEN}" "$@" +} + +# Logs the given message in yellow with a timestamp. +function log_yellow() { + log_impl "${IO_COLOR_YELLOW}" "$@" +} + +# Logs the given message in red with a timestamp. 
+function log_red() { + log_impl "${IO_COLOR_RED}" "$@" +} + +readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX) +readonly tmphome="${tmpdir}/h" +mkdir -p "${tmphome}" + +function cleanup() { + rm -rf "${tmpdir}" +} +trap cleanup EXIT + +RUNNING_IN_CI="${RUNNING_IN_CI:-false}" + +# The workspace in the container, defaults to /workspace. +TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}" + +pass_down_envvars=( + # TRAMPOLINE_V2 variables. + # Tells scripts whether they are running as part of CI or not. + "RUNNING_IN_CI" + # Indicates which CI system we're in. + "TRAMPOLINE_CI" + # Indicates the version of the script. + "TRAMPOLINE_VERSION" +) + +log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}" + +# Detect which CI systems we're in. If we're in any of the CI systems +# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be +# the name of the CI system. Both envvars will be passing down to the +# container for telling which CI system we're in. +if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then + # descriptive env var for indicating it's on CI. + RUNNING_IN_CI="true" + TRAMPOLINE_CI="kokoro" + if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then + if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then + log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting." + exit 1 + fi + # This service account will be activated later. + TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" + else + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + gcloud auth list + fi + log_yellow "Configuring Container Registry access" + gcloud auth configure-docker --quiet + fi + pass_down_envvars+=( + # KOKORO dynamic variables. 
+ "KOKORO_BUILD_NUMBER" + "KOKORO_BUILD_ID" + "KOKORO_JOB_NAME" + "KOKORO_GIT_COMMIT" + "KOKORO_GITHUB_COMMIT" + "KOKORO_GITHUB_PULL_REQUEST_NUMBER" + "KOKORO_GITHUB_PULL_REQUEST_COMMIT" + # For Build Cop Bot + "KOKORO_GITHUB_COMMIT_URL" + "KOKORO_GITHUB_PULL_REQUEST_URL" + ) +elif [[ "${TRAVIS:-}" == "true" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="travis" + pass_down_envvars+=( + "TRAVIS_BRANCH" + "TRAVIS_BUILD_ID" + "TRAVIS_BUILD_NUMBER" + "TRAVIS_BUILD_WEB_URL" + "TRAVIS_COMMIT" + "TRAVIS_COMMIT_MESSAGE" + "TRAVIS_COMMIT_RANGE" + "TRAVIS_JOB_NAME" + "TRAVIS_JOB_NUMBER" + "TRAVIS_JOB_WEB_URL" + "TRAVIS_PULL_REQUEST" + "TRAVIS_PULL_REQUEST_BRANCH" + "TRAVIS_PULL_REQUEST_SHA" + "TRAVIS_PULL_REQUEST_SLUG" + "TRAVIS_REPO_SLUG" + "TRAVIS_SECURE_ENV_VARS" + "TRAVIS_TAG" + ) +elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="github-workflow" + pass_down_envvars+=( + "GITHUB_WORKFLOW" + "GITHUB_RUN_ID" + "GITHUB_RUN_NUMBER" + "GITHUB_ACTION" + "GITHUB_ACTIONS" + "GITHUB_ACTOR" + "GITHUB_REPOSITORY" + "GITHUB_EVENT_NAME" + "GITHUB_EVENT_PATH" + "GITHUB_SHA" + "GITHUB_REF" + "GITHUB_HEAD_REF" + "GITHUB_BASE_REF" + ) +elif [[ "${CIRCLECI:-}" == "true" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="circleci" + pass_down_envvars+=( + "CIRCLE_BRANCH" + "CIRCLE_BUILD_NUM" + "CIRCLE_BUILD_URL" + "CIRCLE_COMPARE_URL" + "CIRCLE_JOB" + "CIRCLE_NODE_INDEX" + "CIRCLE_NODE_TOTAL" + "CIRCLE_PREVIOUS_BUILD_NUM" + "CIRCLE_PROJECT_REPONAME" + "CIRCLE_PROJECT_USERNAME" + "CIRCLE_REPOSITORY_URL" + "CIRCLE_SHA1" + "CIRCLE_STAGE" + "CIRCLE_USERNAME" + "CIRCLE_WORKFLOW_ID" + "CIRCLE_WORKFLOW_JOB_ID" + "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS" + "CIRCLE_WORKFLOW_WORKSPACE_ID" + ) +fi + +# Configure the service account for pulling the docker image. +function repo_root() { + local dir="$1" + while [[ ! -d "${dir}/.git" ]]; do + dir="$(dirname "$dir")" + done + echo "${dir}" +} + +# Detect the project root. 
In CI builds, we assume the script is in +# the git tree and traverse from there, otherwise, traverse from `pwd` +# to find `.git` directory. +if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + PROGRAM_PATH="$(realpath "$0")" + PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")" + PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")" +else + PROJECT_ROOT="$(repo_root $(pwd))" +fi + +log_yellow "Changing to the project root: ${PROJECT_ROOT}." +cd "${PROJECT_ROOT}" + +# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need +# to use this environment variable in `PROJECT_ROOT`. +if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then + + mkdir -p "${tmpdir}/gcloud" + gcloud_config_dir="${tmpdir}/gcloud" + + log_yellow "Using isolated gcloud config: ${gcloud_config_dir}." + export CLOUDSDK_CONFIG="${gcloud_config_dir}" + + log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication." + gcloud auth activate-service-account \ + --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}" + log_yellow "Configuring Container Registry access" + gcloud auth configure-docker --quiet +fi + +required_envvars=( + # The basic trampoline configurations. + "TRAMPOLINE_IMAGE" + "TRAMPOLINE_BUILD_FILE" +) + +if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then + source "${PROJECT_ROOT}/.trampolinerc" +fi + +log_yellow "Checking environment variables." +for e in "${required_envvars[@]}" +do + if [[ -z "${!e:-}" ]]; then + log "Missing ${e} env var. Aborting." + exit 1 + fi +done + +# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1 +# script: e.g. "github/repo-name/.kokoro/run_tests.sh" +TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}" +log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}" + +# ignore error on docker operations and test execution +set +e + +log_yellow "Preparing Docker image." +# We only download the docker image in CI builds. 
+if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + # Download the docker image specified by `TRAMPOLINE_IMAGE` + + # We may want to add --max-concurrent-downloads flag. + + log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}." + if docker pull "${TRAMPOLINE_IMAGE}"; then + log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="true" + else + log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="false" + fi +else + # For local run, check if we have the image. + if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then + has_image="true" + else + has_image="false" + fi +fi + + +# The default user for a Docker container has uid 0 (root). To avoid +# creating root-owned files in the build directory we tell docker to +# use the current user ID. +user_uid="$(id -u)" +user_gid="$(id -g)" +user_name="$(id -un)" + +# To allow docker in docker, we add the user to the docker group in +# the host os. +docker_gid=$(cut -d: -f3 < <(getent group docker)) + +update_cache="false" +if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then + # Build the Docker image from the source. + context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}") + docker_build_flags=( + "-f" "${TRAMPOLINE_DOCKERFILE}" + "-t" "${TRAMPOLINE_IMAGE}" + "--build-arg" "UID=${user_uid}" + "--build-arg" "USERNAME=${user_name}" + ) + if [[ "${has_image}" == "true" ]]; then + docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}") + fi + + log_yellow "Start building the docker image." + if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then + echo "docker build" "${docker_build_flags[@]}" "${context_dir}" + fi + + # ON CI systems, we want to suppress docker build logs, only + # output the logs when it fails. 
+ if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + if docker build "${docker_build_flags[@]}" "${context_dir}" \ + > "${tmpdir}/docker_build.log" 2>&1; then + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + cat "${tmpdir}/docker_build.log" + fi + + log_green "Finished building the docker image." + update_cache="true" + else + log_red "Failed to build the Docker image, aborting." + log_yellow "Dumping the build logs:" + cat "${tmpdir}/docker_build.log" + exit 1 + fi + else + if docker build "${docker_build_flags[@]}" "${context_dir}"; then + log_green "Finished building the docker image." + update_cache="true" + else + log_red "Failed to build the Docker image, aborting." + exit 1 + fi + fi +else + if [[ "${has_image}" != "true" ]]; then + log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting." + exit 1 + fi +fi + +# We use an array for the flags so they are easier to document. +docker_flags=( + # Remove the container after it exits. + "--rm" + + # Use the host network. + "--network=host" + + # Run in privileged mode. We are not using docker for sandboxing or + # isolation, just for packaging our dev tools. + "--privileged" + + # Run the docker script with the user id. Because the docker image gets to + # write in ${PWD} you typically want this to be your user id. + # To allow docker in docker, we need to use docker gid on the host. + "--user" "${user_uid}:${docker_gid}" + + # Pass down the USER. + "--env" "USER=${user_name}" + + # Mount the project directory inside the Docker container. + "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}" + "--workdir" "${TRAMPOLINE_WORKSPACE}" + "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}" + + # Mount the temporary home directory. + "--volume" "${tmphome}:/h" + "--env" "HOME=/h" + + # Allow docker in docker. + "--volume" "/var/run/docker.sock:/var/run/docker.sock" + + # Mount the /tmp so that docker in docker can mount the files + # there correctly. 
+ "--volume" "/tmp:/tmp" + # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR + # TODO(tmatsuo): This part is not portable. + "--env" "TRAMPOLINE_SECRET_DIR=/secrets" + "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile" + "--env" "KOKORO_GFILE_DIR=/secrets/gfile" + "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore" + "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore" +) + +# Add an option for nicer output if the build gets a tty. +if [[ -t 0 ]]; then + docker_flags+=("-it") +fi + +# Passing down env vars +for e in "${pass_down_envvars[@]}" +do + if [[ -n "${!e:-}" ]]; then + docker_flags+=("--env" "${e}=${!e}") + fi +done + +# If arguments are given, all arguments will become the commands run +# in the container, otherwise run TRAMPOLINE_BUILD_FILE. +if [[ $# -ge 1 ]]; then + log_yellow "Running the given commands '" "${@:1}" "' in the container." + readonly commands=("${@:1}") + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" + fi + docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" +else + log_yellow "Running the tests in a Docker container." + docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}") + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" + fi + docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" +fi + + +test_retval=$? + +if [[ ${test_retval} -eq 0 ]]; then + log_green "Build finished with ${test_retval}" +else + log_red "Build finished with ${test_retval}" +fi + +# Only upload it when the test passes. +if [[ "${update_cache}" == "true" ]] && \ + [[ $test_retval == 0 ]] && \ + [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then + log_yellow "Uploading the Docker image." + if docker push "${TRAMPOLINE_IMAGE}"; then + log_green "Finished uploading the Docker image." + else + log_red "Failed uploading the Docker image." 
+ fi + # Call trampoline_after_upload_hook if it's defined. + if function_exists trampoline_after_upload_hook; then + trampoline_after_upload_hook + fi + +fi + +exit "${test_retval}" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..a9024b1 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,17 @@ +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v3.4.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml +- repo: https://github.com/psf/black + rev: 19.10b0 + hooks: + - id: black +- repo: https://gitlab.com/pycqa/flake8 + rev: 3.8.4 + hooks: + - id: flake8 diff --git a/.trampolinerc b/.trampolinerc new file mode 100644 index 0000000..995ee29 --- /dev/null +++ b/.trampolinerc @@ -0,0 +1,51 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Template for .trampolinerc + +# Add required env vars here. +required_envvars+=( + "STAGING_BUCKET" + "V2_STAGING_BUCKET" +) + +# Add env vars which are passed down into the container here. +pass_down_envvars+=( + "STAGING_BUCKET" + "V2_STAGING_BUCKET" +) + +# Prevent unintentional override on the default image. +if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \ + [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image." 
+ exit 1 +fi + +# Define the default value if it makes sense. +if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then + TRAMPOLINE_IMAGE_UPLOAD="" +fi + +if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + TRAMPOLINE_IMAGE="" +fi + +if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then + TRAMPOLINE_DOCKERFILE="" +fi + +if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then + TRAMPOLINE_BUILD_FILE="" +fi diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index b3d1f60..039f436 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -1,44 +1,95 @@ -# Contributor Code of Conduct +# Code of Conduct -As contributors and maintainers of this project, -and in the interest of fostering an open and welcoming community, -we pledge to respect all people who contribute through reporting issues, -posting feature requests, updating documentation, -submitting pull requests or patches, and other activities. +## Our Pledge -We are committed to making participation in this project -a harassment-free experience for everyone, -regardless of level of experience, gender, gender identity and expression, -sexual orientation, disability, personal appearance, -body size, race, ethnicity, age, religion, or nationality. +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members Examples of unacceptable behavior by participants include: -* The use of sexualized language or imagery -* Personal attacks -* Trolling or insulting/derogatory comments -* Public or private harassment -* Publishing other's private information, -such as physical or electronic -addresses, without explicit permission -* Other unethical or unprofessional conduct. +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. Project maintainers have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions -that are not aligned to this Code of Conduct. -By adopting this Code of Conduct, -project maintainers commit themselves to fairly and consistently -applying these principles to every aspect of managing this project. -Project maintainers who do not follow or enforce the Code of Conduct -may be permanently removed from the project team. 
- -This code of conduct applies both within project spaces and in public spaces -when an individual is representing the project or its community. - -Instances of abusive, harassing, or otherwise unacceptable behavior -may be reported by opening an issue -or contacting one or more of the project maintainers. - -This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0, -available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/) +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. 
If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 8577546..92e2f10 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -21,8 +21,8 @@ In order to add a feature: - The feature must be documented in both the API and narrative documentation. -- The feature must work fully on the following CPython versions: 2.7, - 3.5, 3.6, 3.7 and 3.8 on both UNIX and Windows. 
+- The feature must work fully on the following CPython versions: + 3.6, 3.7, 3.8 and 3.9 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -80,25 +80,6 @@ We use `nox `__ to instrument our tests. .. nox: https://pypi.org/project/nox/ -Note on Editable Installs / Develop Mode -======================================== - -- As mentioned previously, using ``setuptools`` in `develop mode`_ - or a ``pip`` `editable install`_ is not possible with this - library. This is because this library uses `namespace packages`_. - For context see `Issue #2316`_ and the relevant `PyPA issue`_. - - Since ``editable`` / ``develop`` mode can't be used, packages - need to be installed directly. Hence your changes to the source - tree don't get incorporated into the **already installed** - package. - -.. _namespace packages: https://www.python.org/dev/peps/pep-0420/ -.. _Issue #2316: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2316 -.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12 -.. _develop mode: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode -.. _editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs - ***************************************** I'm getting weird errors... Can you help? ***************************************** @@ -130,6 +111,16 @@ Coding Style should point to the official ``googleapis`` checkout and the the branch should be the main branch on that remote (``master``). +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. 
code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + Exceptions to PEP8: - Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for @@ -211,25 +202,24 @@ Supported Python Versions We support: -- `Python 3.5`_ - `Python 3.6`_ - `Python 3.7`_ - `Python 3.8`_ +- `Python 3.9`_ -.. _Python 3.5: https://docs.python.org/3.5/ .. _Python 3.6: https://docs.python.org/3.6/ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ Supported versions can be found in our ``noxfile.py`` `config`_. .. _config: https://github.com/googleapis/python-memcache/blob/master/noxfile.py -Python 2.7 support is deprecated. All code changes should maintain Python 2.7 compatibility until January 1, 2020. We also explicitly decided to support Python 3 beginning with version -3.5. Reasons for this include: +3.6. Reasons for this include: - Encouraging use of newest versions of Python 3 - Taking the lead of `prominent`_ open-source `projects`_ diff --git a/MANIFEST.in b/MANIFEST.in index 68855ab..e9e29d1 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -20,3 +20,6 @@ recursive-include google *.json *.proto recursive-include tests * global-exclude *.py[co] global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen \ No newline at end of file diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html index 228529e..6316a53 100644 --- a/docs/_templates/layout.html +++ b/docs/_templates/layout.html @@ -21,8 +21,8 @@
- On January 1, 2020 this library will no longer support Python 2 on the latest released version. - Previously released library versions will continue to be available. For more information please + As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please visit Python 2 support on Google Cloud.
{% block body %} {% endblock %} diff --git a/docs/conf.py b/docs/conf.py index 54a5d17..a84e615 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -20,12 +20,16 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + __version__ = "" # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" +needs_sphinx = "1.5.5" # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -35,6 +39,7 @@ "sphinx.ext.autosummary", "sphinx.ext.intersphinx", "sphinx.ext.coverage", + "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", @@ -43,7 +48,7 @@ # autodoc/autosummary flags autoclass_content = "both" -autodoc_default_flags = ["members"] +autodoc_default_options = {"members": True} autosummary_generate = True @@ -90,7 +95,12 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ["_build"] +exclude_patterns = [ + "_build", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] # The reST default role (used for this markup: `text`) to use for all # documents. @@ -335,10 +345,11 @@ # Example configuration for intersphinx: refer to the Python standard library. 
intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), } diff --git a/docs/memcache_v1beta2/services.rst b/docs/memcache_v1beta2/services.rst index 72a3e8a..6b2845e 100644 --- a/docs/memcache_v1beta2/services.rst +++ b/docs/memcache_v1beta2/services.rst @@ -1,6 +1,6 @@ -Client for Google Cloud Memcache API -==================================== +Services for Google Cloud Memcache v1beta2 API +============================================== -.. automodule:: google.cloud.memcache_v1beta2 +.. automodule:: google.cloud.memcache_v1beta2.services.cloud_memcache :members: :inherited-members: diff --git a/docs/memcache_v1beta2/types.rst b/docs/memcache_v1beta2/types.rst index 746c77b..1b47aa6 100644 --- a/docs/memcache_v1beta2/types.rst +++ b/docs/memcache_v1beta2/types.rst @@ -1,5 +1,6 @@ -Types for Google Cloud Memcache API -=================================== +Types for Google Cloud Memcache v1beta2 API +=========================================== .. automodule:: google.cloud.memcache_v1beta2.types :members: + :show-inheritance: diff --git a/google/cloud/memcache/__init__.py b/google/cloud/memcache/__init__.py index 11d5e0d..b66f063 100644 --- a/google/cloud/memcache/__init__.py +++ b/google/cloud/memcache/__init__.py @@ -15,7 +15,9 @@ # limitations under the License. 
# - +from google.cloud.memcache_v1beta2.services.cloud_memcache.async_client import ( + CloudMemcacheAsyncClient, +) from google.cloud.memcache_v1beta2.services.cloud_memcache.client import ( CloudMemcacheClient, ) @@ -36,6 +38,7 @@ __all__ = ( "ApplyParametersRequest", + "CloudMemcacheAsyncClient", "CloudMemcacheClient", "CreateInstanceRequest", "DeleteInstanceRequest", diff --git a/google/cloud/memcache_v1beta2/__init__.py b/google/cloud/memcache_v1beta2/__init__.py index 6c9725e..13cd09f 100644 --- a/google/cloud/memcache_v1beta2/__init__.py +++ b/google/cloud/memcache_v1beta2/__init__.py @@ -15,7 +15,6 @@ # limitations under the License. # - from .services.cloud_memcache import CloudMemcacheClient from .types.cloud_memcache import ApplyParametersRequest from .types.cloud_memcache import CreateInstanceRequest diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py index 0cc5e89..8524cb4 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py @@ -16,5 +16,9 @@ # from .client import CloudMemcacheClient +from .async_client import CloudMemcacheAsyncClient -__all__ = ("CloudMemcacheClient",) +__all__ = ( + "CloudMemcacheClient", + "CloudMemcacheAsyncClient", +) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py new file mode 100644 index 0000000..9373a06 --- /dev/null +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py @@ -0,0 +1,844 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.memcache_v1beta2.services.cloud_memcache import pagers +from google.cloud.memcache_v1beta2.types import cloud_memcache +from google.protobuf import empty_pb2 as empty # type: ignore +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + +from .transports.base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport +from .client import CloudMemcacheClient + + +class CloudMemcacheAsyncClient: + """Configures and manages Cloud Memorystore for Memcached instances. 
+ + The ``memcache.googleapis.com`` service implements the Google Cloud + Memorystore for Memcached API and defines the following resource + model for managing Memorystore Memcached (also called Memcached + below) instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Memcached instances, named: + ``/instances/*`` + - As such, Memcached instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be refering to a GCP ``region``; for + example: + + - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached`` + """ + + _client: CloudMemcacheClient + + DEFAULT_ENDPOINT = CloudMemcacheClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = CloudMemcacheClient.DEFAULT_MTLS_ENDPOINT + + instance_path = staticmethod(CloudMemcacheClient.instance_path) + parse_instance_path = staticmethod(CloudMemcacheClient.parse_instance_path) + + common_billing_account_path = staticmethod( + CloudMemcacheClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + CloudMemcacheClient.parse_common_billing_account_path + ) + + common_folder_path = staticmethod(CloudMemcacheClient.common_folder_path) + parse_common_folder_path = staticmethod( + CloudMemcacheClient.parse_common_folder_path + ) + + common_organization_path = staticmethod( + CloudMemcacheClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + CloudMemcacheClient.parse_common_organization_path + ) + + common_project_path = staticmethod(CloudMemcacheClient.common_project_path) + parse_common_project_path = staticmethod( + CloudMemcacheClient.parse_common_project_path + ) + + common_location_path = staticmethod(CloudMemcacheClient.common_location_path) + parse_common_location_path = staticmethod( + 
CloudMemcacheClient.parse_common_location_path + ) + + from_service_account_file = CloudMemcacheClient.from_service_account_file + from_service_account_json = from_service_account_file + + @property + def transport(self) -> CloudMemcacheTransport: + """Return the transport used by the client instance. + + Returns: + CloudMemcacheTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(CloudMemcacheClient).get_transport_class, type(CloudMemcacheClient) + ) + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, CloudMemcacheTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the cloud memcache client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.CloudMemcacheTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + + self._client = CloudMemcacheClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_instances( + self, + request: cloud_memcache.ListInstancesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstancesAsyncPager: + r"""Lists Instances in a given project and location. + + Args: + request (:class:`~.cloud_memcache.ListInstancesRequest`): + The request object. Request for + [ListInstances][google.cloud.memcache.v1beta2.CloudMemcache.ListInstances]. + parent (:class:`str`): + Required. The resource name of the instance location + using the form: + ``projects/{project_id}/locations/{location_id}`` where + ``location_id`` refers to a GCP region + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListInstancesAsyncPager: + Response for + [ListInstances][google.cloud.memcache.v1beta2.CloudMemcache.ListInstances]. + + Iterating over this object will yield results and + resolve additional pages automatically. 
+ + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloud_memcache.ListInstancesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_instances, + default_timeout=1200.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInstancesAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_instance( + self, + request: cloud_memcache.GetInstanceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_memcache.Instance: + r"""Gets details of a single Instance. + + Args: + request (:class:`~.cloud_memcache.GetInstanceRequest`): + The request object. Request for + [GetInstance][google.cloud.memcache.v1beta2.CloudMemcache.GetInstance]. + name (:class:`str`): + Required. 
Memcached instance resource name in the + format: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_memcache.Instance: + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloud_memcache.GetInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_instance, + default_timeout=1200.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def create_instance( + self, + request: cloud_memcache.CreateInstanceRequest = None, + *, + parent: str = None, + instance_id: str = None, + resource: cloud_memcache.Instance = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new Instance in a given project and + location. + + Args: + request (:class:`~.cloud_memcache.CreateInstanceRequest`): + The request object. Request for + [CreateInstance][google.cloud.memcache.v1beta2.CloudMemcache.CreateInstance]. + parent (:class:`str`): + Required. The resource name of the instance location + using the form: + ``projects/{project_id}/locations/{location_id}`` where + ``location_id`` refers to a GCP region + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_id (:class:`str`): + Required. The logical name of the Memcached instance in + the user project with the following restrictions: + + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-40 characters. + - Must end with a number or a letter. + - Must be unique within the user project / location + + This corresponds to the ``instance_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (:class:`~.cloud_memcache.Instance`): + Required. A Memcached [Instance] resource + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:``~.cloud_memcache.Instance``: + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, instance_id, resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloud_memcache.CreateInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if instance_id is not None: + request.instance_id = instance_id + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_instance, + default_timeout=1200.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def update_instance( + self, + request: cloud_memcache.UpdateInstanceRequest = None, + *, + update_mask: field_mask.FieldMask = None, + resource: cloud_memcache.Instance = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an existing Instance in a given project and + location. + + Args: + request (:class:`~.cloud_memcache.UpdateInstanceRequest`): + The request object. Request for + [UpdateInstance][google.cloud.memcache.v1beta2.CloudMemcache.UpdateInstance]. + update_mask (:class:`~.field_mask.FieldMask`): + Required. Mask of fields to update. + + - ``displayName`` + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (:class:`~.cloud_memcache.Instance`): + Required. A Memcached [Instance] resource. Only fields + specified in update_mask are updated. + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:``~.cloud_memcache.Instance``: + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([update_mask, resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = cloud_memcache.UpdateInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if update_mask is not None: + request.update_mask = update_mask + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_instance, + default_timeout=1200.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource.name", request.resource.name),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_parameters( + self, + request: cloud_memcache.UpdateParametersRequest = None, + *, + name: str = None, + update_mask: field_mask.FieldMask = None, + parameters: cloud_memcache.MemcacheParameters = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the defined Memcached Parameters for an + existing Instance. This method only stages the + parameters, it must be followed by ApplyParameters to + apply the parameters to nodes of the Memcached Instance. + + Args: + request (:class:`~.cloud_memcache.UpdateParametersRequest`): + The request object. Request for + [UpdateParameters][google.cloud.memcache.v1beta2.CloudMemcache.UpdateParameters]. + name (:class:`str`): + Required. 
Resource name of the + Memcached instance for which the + parameters should be updated. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`~.field_mask.FieldMask`): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + parameters (:class:`~.cloud_memcache.MemcacheParameters`): + The parameters to apply to the + instance. + This corresponds to the ``parameters`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:``~.cloud_memcache.Instance``: + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, update_mask, parameters]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloud_memcache.UpdateParametersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if update_mask is not None: + request.update_mask = update_mask + if parameters is not None: + request.parameters = parameters + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_parameters, + default_timeout=1200.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_instance( + self, + request: cloud_memcache.DeleteInstanceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single Instance. + + Args: + request (:class:`~.cloud_memcache.DeleteInstanceRequest`): + The request object. Request for + [DeleteInstance][google.cloud.memcache.v1beta2.CloudMemcache.DeleteInstance]. + name (:class:`str`): + Memcached instance resource name in the format: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:``~.empty.Empty``: A generic empty message that + you can re-use to avoid defining duplicated empty + messages in your APIs. A typical example is to use it as + the request or the response type of an API method. For + instance: + + :: + + service Foo { + rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); + } + + The JSON representation for ``Empty`` is empty JSON + object ``{}``. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloud_memcache.DeleteInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_instance, + default_timeout=1200.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty.Empty, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def apply_parameters( + self, + request: cloud_memcache.ApplyParametersRequest = None, + *, + name: str = None, + node_ids: Sequence[str] = None, + apply_all: bool = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""ApplyParameters will update current set of Parameters + to the set of specified nodes of the Memcached Instance. + + Args: + request (:class:`~.cloud_memcache.ApplyParametersRequest`): + The request object. Request for + [ApplyParameters][google.cloud.memcache.v1beta2.CloudMemcache.ApplyParameters]. + name (:class:`str`): + Required. Resource name of the + Memcached instance for which parameter + group updates should be applied. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_ids (:class:`Sequence[str]`): + Nodes to which we should apply the + instance-level parameter group. + This corresponds to the ``node_ids`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + apply_all (:class:`bool`): + Whether to apply instance-level + parameter group to all nodes. If set to + true, will explicitly restrict users + from specifying any nodes, and apply + parameter group updates to all nodes + within the instance. + This corresponds to the ``apply_all`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:``~.cloud_memcache.Instance``: + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, node_ids, apply_all]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloud_memcache.ApplyParametersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if apply_all is not None: + request.apply_all = apply_all + + if node_ids: + request.node_ids.extend(node_ids) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.apply_parameters, + default_timeout=1200.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-memcache",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("CloudMemcacheAsyncClient",) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index 19c6bde..0a1ef7c 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -16,26 +16,33 @@ # from collections import OrderedDict +from distutils import util +import os import re -from typing import Callable, Dict, Sequence, Tuple, Type, Union +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import client_options as client_options_lib # type: ignore from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -from google.api_core import operation +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.cloud.memcache_v1beta2.services.cloud_memcache import pagers from google.cloud.memcache_v1beta2.types import cloud_memcache from google.protobuf import empty_pb2 as empty # type: ignore from google.protobuf import field_mask_pb2 as field_mask # type: ignore from google.protobuf import timestamp_pb2 as 
timestamp # type: ignore -from .transports.base import CloudMemcacheTransport +from .transports.base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO from .transports.grpc import CloudMemcacheGrpcTransport +from .transports.grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport class CloudMemcacheClientMeta(type): @@ -48,8 +55,9 @@ class CloudMemcacheClientMeta(type): _transport_registry = OrderedDict() # type: Dict[str, Type[CloudMemcacheTransport]] _transport_registry["grpc"] = CloudMemcacheGrpcTransport + _transport_registry["grpc_asyncio"] = CloudMemcacheGrpcAsyncIOTransport - def get_transport_class(cls, label: str = None) -> Type[CloudMemcacheTransport]: + def get_transport_class(cls, label: str = None,) -> Type[CloudMemcacheTransport]: """Return an appropriate transport class. Args: @@ -145,11 +153,20 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @property + def transport(self) -> CloudMemcacheTransport: + """Return the transport used by the client instance. + + Returns: + CloudMemcacheTransport: The transport used by the client instance. 
+ """ + return self._transport + @staticmethod - def instance_path(project: str, location: str, instance: str) -> str: + def instance_path(project: str, location: str, instance: str,) -> str: """Return a fully-qualified instance string.""" return "projects/{project}/locations/{location}/instances/{instance}".format( - project=project, location=location, instance=instance + project=project, location=location, instance=instance, ) @staticmethod @@ -161,12 +178,72 @@ def parse_instance_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Return a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Return a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Return a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str,) -> str: + """Return a fully-qualified project string.""" + return 
"projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str,) -> str: + """Return a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + def __init__( self, *, - credentials: credentials.Credentials = None, - transport: Union[str, CloudMemcacheTransport] = None, - client_options: ClientOptions = None, + credentials: Optional[credentials.Credentials] = None, + transport: Union[str, CloudMemcacheTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the cloud memcache client. @@ -179,66 +256,102 @@ def __init__( transport (Union[str, ~.CloudMemcacheTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. + client_options (client_options_lib.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. - (2) If ``transport`` argument is None, ``client_options`` can be - used to create a mutual TLS transport. 
If ``client_cert_source`` - is provided, mutual TLS transport will be created with the given - ``api_endpoint`` or the default mTLS endpoint, and the client - SSL credentials obtained from ``client_cert_source``. + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. """ if isinstance(client_options, dict): - client_options = ClientOptions.from_dict(client_options) + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. 
+ use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) + + ssl_credentials = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + import grpc # type: ignore + + cert, key = client_options.client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + is_mtls = True + else: + creds = SslCredentials() + is_mtls = creds.is_mtls + ssl_credentials = creds.ssl_credentials if is_mtls else None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT + ) + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, CloudMemcacheTransport): # transport is a CloudMemcacheTransport instance. - if credentials: + if credentials or client_options.credentials_file: raise ValueError( "When providing a transport instance, " "provide its credentials directly." ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, " + "provide its scopes directly." + ) self._transport = transport - elif client_options is None or ( - client_options.api_endpoint is None - and client_options.client_cert_source is None - ): - # Don't trigger mTLS if we get an empty ClientOptions. 
+ else: Transport = type(self).get_transport_class(transport) self._transport = Transport( - credentials=credentials, host=self.DEFAULT_ENDPOINT - ) - else: - # We have a non-empty ClientOptions. If client_cert_source is - # provided, trigger mTLS with user provided endpoint or the default - # mTLS endpoint. - if client_options.client_cert_source: - api_mtls_endpoint = ( - client_options.api_endpoint - if client_options.api_endpoint - else self.DEFAULT_MTLS_ENDPOINT - ) - else: - api_mtls_endpoint = None - - api_endpoint = ( - client_options.api_endpoint - if client_options.api_endpoint - else self.DEFAULT_ENDPOINT - ) - - self._transport = CloudMemcacheGrpcTransport( credentials=credentials, + credentials_file=client_options.credentials_file, host=api_endpoint, - api_mtls_endpoint=api_mtls_endpoint, - client_cert_source=client_options.client_cert_source, + scopes=client_options.scopes, + ssl_channel_credentials=ssl_credentials, + quota_project_id=client_options.quota_project_id, + client_info=client_info, ) def list_instances( @@ -283,27 +396,29 @@ def list_instances( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent]): + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = cloud_memcache.ListInstancesRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloud_memcache.ListInstancesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_memcache.ListInstancesRequest): + request = cloud_memcache.ListInstancesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
+ # If we have keyword arguments corresponding to fields on the + # request, apply these. - if parent is not None: - request.parent = parent + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_instances, - default_timeout=None, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.list_instances] # Certain fields should be provided within the metadata header; # add these here. @@ -312,12 +427,12 @@ def list_instances( ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListInstancesPager( - method=rpc, request=request, response=response + method=rpc, request=request, response=response, metadata=metadata, ) # Done; return the response. @@ -360,25 +475,29 @@ def get_instance( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = cloud_memcache.GetInstanceRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloud_memcache.GetInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, cloud_memcache.GetInstanceRequest): + request = cloud_memcache.GetInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if name is not None: - request.name = name + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_instance, default_timeout=None, client_info=_client_info - ) + rpc = self._transport._wrapped_methods[self._transport.get_instance] # Certain fields should be provided within the metadata header; # add these here. @@ -387,7 +506,7 @@ def get_instance( ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response @@ -455,34 +574,42 @@ def create_instance( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent, instance_id, resource]): + has_flattened_params = any([parent, instance_id, resource]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = cloud_memcache.CreateInstanceRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloud_memcache.CreateInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, cloud_memcache.CreateInstanceRequest): + request = cloud_memcache.CreateInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if parent is not None: - request.parent = parent - if instance_id is not None: - request.instance_id = instance_id - if resource is not None: - request.resource = resource + if parent is not None: + request.parent = parent + if instance_id is not None: + request.instance_id = instance_id + if resource is not None: + request.resource = resource # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.create_instance, - default_timeout=None, - client_info=_client_info, + rpc = self._transport._wrapped_methods[self._transport.create_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = operation.from_gapic( @@ -544,32 +671,42 @@ def update_instance( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([update_mask, resource]): + has_flattened_params = any([update_mask, resource]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
) - request = cloud_memcache.UpdateInstanceRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloud_memcache.UpdateInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_memcache.UpdateInstanceRequest): + request = cloud_memcache.UpdateInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if update_mask is not None: - request.update_mask = update_mask - if resource is not None: - request.resource = resource + if update_mask is not None: + request.update_mask = update_mask + if resource is not None: + request.resource = resource # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.update_instance, - default_timeout=None, - client_info=_client_info, + rpc = self._transport._wrapped_methods[self._transport.update_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource.name", request.resource.name),) + ), ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = operation.from_gapic( @@ -638,34 +775,42 @@ def update_parameters( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- if request is not None and any([name, update_mask, parameters]): + has_flattened_params = any([name, update_mask, parameters]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = cloud_memcache.UpdateParametersRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloud_memcache.UpdateParametersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_memcache.UpdateParametersRequest): + request = cloud_memcache.UpdateParametersRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if name is not None: - request.name = name - if update_mask is not None: - request.update_mask = update_mask - if parameters is not None: - request.parameters = parameters + if name is not None: + request.name = name + if update_mask is not None: + request.update_mask = update_mask + if parameters is not None: + request.parameters = parameters # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.update_parameters, - default_timeout=None, - client_info=_client_info, + rpc = self._transport._wrapped_methods[self._transport.update_parameters] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. 
response = operation.from_gapic( @@ -731,30 +876,38 @@ def delete_instance( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = cloud_memcache.DeleteInstanceRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloud_memcache.DeleteInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_memcache.DeleteInstanceRequest): + request = cloud_memcache.DeleteInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if name is not None: - request.name = name + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_instance, - default_timeout=None, - client_info=_client_info, + rpc = self._transport._wrapped_methods[self._transport.delete_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = operation.from_gapic( @@ -826,34 +979,43 @@ def apply_parameters( # Create or coerce a protobuf request object. 
# Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name, node_ids, apply_all]): + has_flattened_params = any([name, node_ids, apply_all]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = cloud_memcache.ApplyParametersRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloud_memcache.ApplyParametersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_memcache.ApplyParametersRequest): + request = cloud_memcache.ApplyParametersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. - # If we have keyword arguments corresponding to fields on the - # request, apply these. + if name is not None: + request.name = name + if apply_all is not None: + request.apply_all = apply_all - if name is not None: - request.name = name - if node_ids is not None: - request.node_ids = node_ids - if apply_all is not None: - request.apply_all = apply_all + if node_ids: + request.node_ids.extend(node_ids) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.apply_parameters, - default_timeout=None, - client_info=_client_info, + rpc = self._transport._wrapped_methods[self._transport.apply_parameters] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Send the request. 
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = operation.from_gapic( @@ -868,11 +1030,11 @@ def apply_parameters( try: - _client_info = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-memcache").version + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-memcache",).version, ) except pkg_resources.DistributionNotFound: - _client_info = gapic_v1.client_info.ClientInfo() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() __all__ = ("CloudMemcacheClient",) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py index be8dd7f..7e7696a 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py @@ -15,7 +15,7 @@ # limitations under the License. # -from typing import Any, Callable, Iterable +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple from google.cloud.memcache_v1beta2.types import cloud_memcache @@ -40,11 +40,11 @@ class ListInstancesPager: def __init__( self, - method: Callable[ - [cloud_memcache.ListInstancesRequest], cloud_memcache.ListInstancesResponse - ], + method: Callable[..., cloud_memcache.ListInstancesResponse], request: cloud_memcache.ListInstancesRequest, response: cloud_memcache.ListInstancesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -55,10 +55,13 @@ def __init__( The initial request object. response (:class:`~.cloud_memcache.ListInstancesResponse`): The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
""" self._method = method self._request = cloud_memcache.ListInstancesRequest(request) self._response = response + self._metadata = metadata def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @@ -68,7 +71,7 @@ def pages(self) -> Iterable[cloud_memcache.ListInstancesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request) + self._response = self._method(self._request, metadata=self._metadata) yield self._response def __iter__(self) -> Iterable[cloud_memcache.Instance]: @@ -77,3 +80,69 @@ def __iter__(self) -> Iterable[cloud_memcache.Instance]: def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListInstancesAsyncPager: + """A pager for iterating through ``list_instances`` requests. + + This class thinly wraps an initial + :class:`~.cloud_memcache.ListInstancesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``resources`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListInstances`` requests and continue to iterate + through the ``resources`` field on the + corresponding responses. + + All the usual :class:`~.cloud_memcache.ListInstancesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[cloud_memcache.ListInstancesResponse]], + request: cloud_memcache.ListInstancesRequest, + response: cloud_memcache.ListInstancesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.cloud_memcache.ListInstancesRequest`): + The initial request object. 
+ response (:class:`~.cloud_memcache.ListInstancesResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloud_memcache.ListInstancesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[cloud_memcache.ListInstancesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[cloud_memcache.Instance]: + async def async_generator(): + async for page in self.pages: + for response in page.resources: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py index 33272b6..38122c6 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py @@ -20,11 +20,16 @@ from .base import CloudMemcacheTransport from .grpc import CloudMemcacheGrpcTransport +from .grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[CloudMemcacheTransport]] _transport_registry["grpc"] = CloudMemcacheGrpcTransport +_transport_registry["grpc_asyncio"] = CloudMemcacheGrpcAsyncIOTransport - -__all__ = ("CloudMemcacheTransport", "CloudMemcacheGrpcTransport") +__all__ = ( + "CloudMemcacheTransport", + "CloudMemcacheGrpcTransport", + "CloudMemcacheGrpcAsyncIOTransport", +) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py index e166ead..3a96e70 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py @@ -17,8 +17,12 @@ import abc import typing +import pkg_resources -from google import auth +from google import auth # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore from google.api_core import operations_v1 # type: ignore from google.auth import credentials # type: ignore @@ -26,7 +30,15 @@ from google.longrunning import operations_pb2 as operations # type: ignore -class CloudMemcacheTransport(metaclass=abc.ABCMeta): +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-memcache",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +class CloudMemcacheTransport(abc.ABC): """Abstract transport class for CloudMemcache.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) @@ -36,6 +48,11 @@ def __init__( *, host: str = "memcache.googleapis.com", credentials: credentials.Credentials = None, + credentials_file: typing.Optional[str] = None, + scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, + quota_project_id: typing.Optional[str] = None, + 
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + **kwargs, ) -> None: """Instantiate the transport. @@ -46,6 +63,17 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scope (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: @@ -54,62 +82,125 @@ def __init__( # If no credentials are provided, then determine the appropriate # defaults. - if credentials is None: - credentials, _ = auth.default(scopes=self.AUTH_SCOPES) + if credentials and credentials_file: + raise exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = auth.load_credentials_from_file( + credentials_file, scopes=scopes, quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = auth.default( + scopes=scopes, quota_project_id=quota_project_id + ) # Save the credentials. self._credentials = credentials + # Lifted into its own function so it can be stubbed out during tests. + self._prep_wrapped_messages(client_info) + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.list_instances: gapic_v1.method.wrap_method( + self.list_instances, default_timeout=1200.0, client_info=client_info, + ), + self.get_instance: gapic_v1.method.wrap_method( + self.get_instance, default_timeout=1200.0, client_info=client_info, + ), + self.create_instance: gapic_v1.method.wrap_method( + self.create_instance, default_timeout=1200.0, client_info=client_info, + ), + self.update_instance: gapic_v1.method.wrap_method( + self.update_instance, default_timeout=1200.0, client_info=client_info, + ), + self.update_parameters: gapic_v1.method.wrap_method( + self.update_parameters, default_timeout=1200.0, client_info=client_info, + ), + self.delete_instance: gapic_v1.method.wrap_method( + self.delete_instance, default_timeout=1200.0, client_info=client_info, + ), + self.apply_parameters: gapic_v1.method.wrap_method( + self.apply_parameters, default_timeout=1200.0, client_info=client_info, + ), + } + @property def operations_client(self) -> operations_v1.OperationsClient: """Return the client designed to process long-running operations.""" - raise NotImplementedError + raise NotImplementedError() @property def list_instances( - self + self, ) -> typing.Callable[ - [cloud_memcache.ListInstancesRequest], cloud_memcache.ListInstancesResponse + [cloud_memcache.ListInstancesRequest], + typing.Union[ + cloud_memcache.ListInstancesResponse, + typing.Awaitable[cloud_memcache.ListInstancesResponse], + ], ]: - raise NotImplementedError + raise NotImplementedError() @property def get_instance( - self - ) -> typing.Callable[[cloud_memcache.GetInstanceRequest], cloud_memcache.Instance]: - raise NotImplementedError + self, + ) -> typing.Callable[ + [cloud_memcache.GetInstanceRequest], + typing.Union[ + cloud_memcache.Instance, typing.Awaitable[cloud_memcache.Instance] + ], + ]: + raise NotImplementedError() @property def create_instance( - self - ) -> typing.Callable[[cloud_memcache.CreateInstanceRequest], operations.Operation]: - raise 
NotImplementedError + self, + ) -> typing.Callable[ + [cloud_memcache.CreateInstanceRequest], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + ]: + raise NotImplementedError() @property def update_instance( - self - ) -> typing.Callable[[cloud_memcache.UpdateInstanceRequest], operations.Operation]: - raise NotImplementedError + self, + ) -> typing.Callable[ + [cloud_memcache.UpdateInstanceRequest], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + ]: + raise NotImplementedError() @property def update_parameters( - self + self, ) -> typing.Callable[ - [cloud_memcache.UpdateParametersRequest], operations.Operation + [cloud_memcache.UpdateParametersRequest], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], ]: - raise NotImplementedError + raise NotImplementedError() @property def delete_instance( - self - ) -> typing.Callable[[cloud_memcache.DeleteInstanceRequest], operations.Operation]: - raise NotImplementedError + self, + ) -> typing.Callable[ + [cloud_memcache.DeleteInstanceRequest], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + ]: + raise NotImplementedError() @property def apply_parameters( - self - ) -> typing.Callable[[cloud_memcache.ApplyParametersRequest], operations.Operation]: - raise NotImplementedError + self, + ) -> typing.Callable[ + [cloud_memcache.ApplyParametersRequest], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + ]: + raise NotImplementedError() __all__ = ("CloudMemcacheTransport",) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py index 0d09f79..09e1170 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py @@ -15,20 +15,22 @@ # limitations under the License. 
# -from typing import Callable, Dict, Tuple +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple from google.api_core import grpc_helpers # type: ignore from google.api_core import operations_v1 # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google import auth # type: ignore from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore - import grpc # type: ignore from google.cloud.memcache_v1beta2.types import cloud_memcache from google.longrunning import operations_pb2 as operations # type: ignore -from .base import CloudMemcacheTransport +from .base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO class CloudMemcacheGrpcTransport(CloudMemcacheTransport): @@ -63,14 +65,21 @@ class CloudMemcacheGrpcTransport(CloudMemcacheTransport): top of HTTP/2); the ``grpcio`` package must be installed. """ + _stubs: Dict[str, Callable] + def __init__( self, *, host: str = "memcache.googleapis.com", credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, channel: grpc.Channel = None, api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the transport. @@ -82,21 +91,39 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. 
channel (Optional[grpc.Channel]): A ``Channel`` instance through which to make calls. - api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If - provided, it overrides the ``host`` argument and tries to create + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A - callback to provide client SSL certificate bytes and private key - bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` - is None. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -104,13 +131,24 @@ def __init__( # If a channel was explicitly provided, set it. 
self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: + warnings.warn( + "api_mtls_endpoint and client_cert_source are deprecated", + DeprecationWarning, + ) + host = ( api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" ) + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + # Create SSL credentials with client_cert_source or application # default SSL credentials. if client_cert_source: @@ -122,56 +160,103 @@ def __init__( ssl_credentials = SslCredentials().ssl_credentials # create a new channel. The provided one is ignored. - self._grpc_channel = grpc_helpers.create_channel( + self._grpc_channel = type(self).create_channel( host, credentials=credentials, + credentials_file=credentials_file, ssl_credentials=ssl_credentials, - scopes=self.AUTH_SCOPES, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + self._ssl_channel_credentials = ssl_credentials + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) - # Run the base constructor. - super().__init__(host=host, credentials=credentials) self._stubs = {} # type: Dict[str, Callable] + self._operations_client = None + + # Run the base constructor. 
+ super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + client_info=client_info, + ) @classmethod def create_channel( cls, host: str = "memcache.googleapis.com", credentials: credentials.Credentials = None, - **kwargs + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optionsl[str]): The host for the channel to use. + address (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. kwargs (Optional[dict]): Keyword arguments, which are passed to the channel creation. Returns: grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
""" + scopes = scopes or cls.AUTH_SCOPES return grpc_helpers.create_channel( - host, credentials=credentials, scopes=cls.AUTH_SCOPES, **kwargs + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, ) @property def grpc_channel(self) -> grpc.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. + """Return the channel designed to connect to this service. """ - # Sanity check: Only create a new channel if we do not already - # have one. - if not hasattr(self, "_grpc_channel"): - self._grpc_channel = self.create_channel( - self._host, credentials=self._credentials - ) - - # Return the channel from cache. return self._grpc_channel @property @@ -182,17 +267,15 @@ def operations_client(self) -> operations_v1.OperationsClient: client. """ # Sanity check: Only create a new client if we do not already have one. - if "operations_client" not in self.__dict__: - self.__dict__["operations_client"] = operations_v1.OperationsClient( - self.grpc_channel - ) + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) # Return the client from cache. - return self.__dict__["operations_client"] + return self._operations_client @property def list_instances( - self + self, ) -> Callable[ [cloud_memcache.ListInstancesRequest], cloud_memcache.ListInstancesResponse ]: @@ -220,7 +303,7 @@ def list_instances( @property def get_instance( - self + self, ) -> Callable[[cloud_memcache.GetInstanceRequest], cloud_memcache.Instance]: r"""Return a callable for the get instance method over gRPC. @@ -246,7 +329,7 @@ def get_instance( @property def create_instance( - self + self, ) -> Callable[[cloud_memcache.CreateInstanceRequest], operations.Operation]: r"""Return a callable for the create instance method over gRPC. 
@@ -273,7 +356,7 @@ def create_instance( @property def update_instance( - self + self, ) -> Callable[[cloud_memcache.UpdateInstanceRequest], operations.Operation]: r"""Return a callable for the update instance method over gRPC. @@ -300,7 +383,7 @@ def update_instance( @property def update_parameters( - self + self, ) -> Callable[[cloud_memcache.UpdateParametersRequest], operations.Operation]: r"""Return a callable for the update parameters method over gRPC. @@ -329,7 +412,7 @@ def update_parameters( @property def delete_instance( - self + self, ) -> Callable[[cloud_memcache.DeleteInstanceRequest], operations.Operation]: r"""Return a callable for the delete instance method over gRPC. @@ -355,7 +438,7 @@ def delete_instance( @property def apply_parameters( - self + self, ) -> Callable[[cloud_memcache.ApplyParametersRequest], operations.Operation]: r"""Return a callable for the apply parameters method over gRPC. diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py new file mode 100644 index 0000000..009acf5 --- /dev/null +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py @@ -0,0 +1,486 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple
+
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import grpc_helpers_async  # type: ignore
+from google.api_core import operations_v1  # type: ignore
+from google import auth  # type: ignore
+from google.auth import credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.memcache_v1beta2.types import cloud_memcache
+from google.longrunning import operations_pb2 as operations  # type: ignore
+
+from .base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO
+from .grpc import CloudMemcacheGrpcTransport
+
+
+class CloudMemcacheGrpcAsyncIOTransport(CloudMemcacheTransport):
+    """gRPC AsyncIO backend transport for CloudMemcache.
+
+    Configures and manages Cloud Memorystore for Memcached instances.
+
+    The ``memcache.googleapis.com`` service implements the Google Cloud
+    Memorystore for Memcached API and defines the following resource
+    model for managing Memorystore Memcached (also called Memcached
+    below) instances:
+
+    -  The service works with a collection of cloud projects, named:
+       ``/projects/*``
+    -  Each project has a collection of available locations, named:
+       ``/locations/*``
+    -  Each location has a collection of Memcached instances, named:
+       ``/instances/*``
+    -  As such, Memcached instances are resources of the form:
+       ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+
+    Note that location_id must be referring to a GCP ``region``; for
+    example:
+
+    -  ``projects/my-memcached-project/locations/us-central1/instances/my-memcached``
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "memcache.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            address (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+ """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "memcache.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._ssl_channel_credentials = ssl_channel_credentials + + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + elif api_mtls_endpoint: + warnings.warn( + "api_mtls_endpoint and client_cert_source are deprecated", + DeprecationWarning, + ) + + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. 
+ self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + self._ssl_channel_credentials = ssl_credentials + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Run the base constructor. + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + self._stubs = {} + self._operations_client = None + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Sanity check: Only create a new client if we do not already have one. 
+ if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_instances( + self, + ) -> Callable[ + [cloud_memcache.ListInstancesRequest], + Awaitable[cloud_memcache.ListInstancesResponse], + ]: + r"""Return a callable for the list instances method over gRPC. + + Lists Instances in a given project and location. + + Returns: + Callable[[~.ListInstancesRequest], + Awaitable[~.ListInstancesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_instances" not in self._stubs: + self._stubs["list_instances"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1beta2.CloudMemcache/ListInstances", + request_serializer=cloud_memcache.ListInstancesRequest.serialize, + response_deserializer=cloud_memcache.ListInstancesResponse.deserialize, + ) + return self._stubs["list_instances"] + + @property + def get_instance( + self, + ) -> Callable[ + [cloud_memcache.GetInstanceRequest], Awaitable[cloud_memcache.Instance] + ]: + r"""Return a callable for the get instance method over gRPC. + + Gets details of a single Instance. + + Returns: + Callable[[~.GetInstanceRequest], + Awaitable[~.Instance]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_instance" not in self._stubs: + self._stubs["get_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1beta2.CloudMemcache/GetInstance", + request_serializer=cloud_memcache.GetInstanceRequest.serialize, + response_deserializer=cloud_memcache.Instance.deserialize, + ) + return self._stubs["get_instance"] + + @property + def create_instance( + self, + ) -> Callable[ + [cloud_memcache.CreateInstanceRequest], Awaitable[operations.Operation] + ]: + r"""Return a callable for the create instance method over gRPC. + + Creates a new Instance in a given project and + location. + + Returns: + Callable[[~.CreateInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_instance" not in self._stubs: + self._stubs["create_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1beta2.CloudMemcache/CreateInstance", + request_serializer=cloud_memcache.CreateInstanceRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["create_instance"] + + @property + def update_instance( + self, + ) -> Callable[ + [cloud_memcache.UpdateInstanceRequest], Awaitable[operations.Operation] + ]: + r"""Return a callable for the update instance method over gRPC. + + Updates an existing Instance in a given project and + location. + + Returns: + Callable[[~.UpdateInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_instance" not in self._stubs: + self._stubs["update_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1beta2.CloudMemcache/UpdateInstance", + request_serializer=cloud_memcache.UpdateInstanceRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["update_instance"] + + @property + def update_parameters( + self, + ) -> Callable[ + [cloud_memcache.UpdateParametersRequest], Awaitable[operations.Operation] + ]: + r"""Return a callable for the update parameters method over gRPC. + + Updates the defined Memcached Parameters for an + existing Instance. This method only stages the + parameters, it must be followed by ApplyParameters to + apply the parameters to nodes of the Memcached Instance. + + Returns: + Callable[[~.UpdateParametersRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_parameters" not in self._stubs: + self._stubs["update_parameters"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1beta2.CloudMemcache/UpdateParameters", + request_serializer=cloud_memcache.UpdateParametersRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["update_parameters"] + + @property + def delete_instance( + self, + ) -> Callable[ + [cloud_memcache.DeleteInstanceRequest], Awaitable[operations.Operation] + ]: + r"""Return a callable for the delete instance method over gRPC. + + Deletes a single Instance. + + Returns: + Callable[[~.DeleteInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_instance" not in self._stubs: + self._stubs["delete_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1beta2.CloudMemcache/DeleteInstance", + request_serializer=cloud_memcache.DeleteInstanceRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["delete_instance"] + + @property + def apply_parameters( + self, + ) -> Callable[ + [cloud_memcache.ApplyParametersRequest], Awaitable[operations.Operation] + ]: + r"""Return a callable for the apply parameters method over gRPC. + + ApplyParameters will update current set of Parameters + to the set of specified nodes of the Memcached Instance. + + Returns: + Callable[[~.ApplyParametersRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "apply_parameters" not in self._stubs: + self._stubs["apply_parameters"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1beta2.CloudMemcache/ApplyParameters", + request_serializer=cloud_memcache.ApplyParametersRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["apply_parameters"] + + +__all__ = ("CloudMemcacheGrpcAsyncIOTransport",) diff --git a/google/cloud/memcache_v1beta2/types/__init__.py b/google/cloud/memcache_v1beta2/types/__init__.py index f28a4dc..8619a5a 100644 --- a/google/cloud/memcache_v1beta2/types/__init__.py +++ b/google/cloud/memcache_v1beta2/types/__init__.py @@ -29,9 +29,9 @@ OperationMetadata, LocationMetadata, ZoneMetadata, + MemcacheVersion, ) - __all__ = ( "Instance", "ListInstancesRequest", @@ -46,4 +46,5 @@ "OperationMetadata", "LocationMetadata", "ZoneMetadata", + "MemcacheVersion", ) diff --git a/google/cloud/memcache_v1beta2/types/cloud_memcache.py b/google/cloud/memcache_v1beta2/types/cloud_memcache.py index 5a0423f..36cd39e 100644 --- a/google/cloud/memcache_v1beta2/types/cloud_memcache.py +++ b/google/cloud/memcache_v1beta2/types/cloud_memcache.py @@ -75,8 +75,8 @@ class Instance(proto.Message): resources authorized_network (str): Optional. The full name of the Google Compute Engine - `network `__ - to which the instance is connected. If left unspecified, the + `network `__ to which + the instance is connected. If left unspecified, the ``default`` network will be used. zones (Sequence[str]): Optional. 
Zones where Memcached nodes should @@ -148,6 +148,7 @@ class NodeConfig(proto.Message): """ cpu_count = proto.Field(proto.INT32, number=1) + memory_size_mb = proto.Field(proto.INT32, number=2) class Node(proto.Message): @@ -187,11 +188,16 @@ class State(proto.Enum): UPDATING = 4 node_id = proto.Field(proto.STRING, number=1) + zone = proto.Field(proto.STRING, number=2) - state = proto.Field(proto.ENUM, number=3, enum="Instance.Node.State") + + state = proto.Field(proto.ENUM, number=3, enum="Instance.Node.State",) + host = proto.Field(proto.STRING, number=4) + port = proto.Field(proto.INT32, number=5) - parameters = proto.Field(proto.MESSAGE, number=6, message="MemcacheParameters") + + parameters = proto.Field(proto.MESSAGE, number=6, message="MemcacheParameters",) class InstanceMessage(proto.Message): r""" @@ -210,26 +216,42 @@ class Code(proto.Enum): CODE_UNSPECIFIED = 0 ZONE_DISTRIBUTION_UNBALANCED = 1 - code = proto.Field(proto.ENUM, number=1, enum="Instance.InstanceMessage.Code") + code = proto.Field(proto.ENUM, number=1, enum="Instance.InstanceMessage.Code",) + message = proto.Field(proto.STRING, number=2) name = proto.Field(proto.STRING, number=1) + display_name = proto.Field(proto.STRING, number=2) + labels = proto.MapField(proto.STRING, proto.STRING, number=3) + authorized_network = proto.Field(proto.STRING, number=4) + zones = proto.RepeatedField(proto.STRING, number=5) + node_count = proto.Field(proto.INT32, number=6) - node_config = proto.Field(proto.MESSAGE, number=7, message=NodeConfig) - memcache_version = proto.Field(proto.ENUM, number=9, enum="MemcacheVersion") - parameters = proto.Field(proto.MESSAGE, number=11, message="MemcacheParameters") - memcache_nodes = proto.RepeatedField(proto.MESSAGE, number=12, message=Node) - create_time = proto.Field(proto.MESSAGE, number=13, message=timestamp.Timestamp) - update_time = proto.Field(proto.MESSAGE, number=14, message=timestamp.Timestamp) - state = proto.Field(proto.ENUM, number=15, enum=State) + + 
node_config = proto.Field(proto.MESSAGE, number=7, message=NodeConfig,) + + memcache_version = proto.Field(proto.ENUM, number=9, enum="MemcacheVersion",) + + parameters = proto.Field(proto.MESSAGE, number=11, message="MemcacheParameters",) + + memcache_nodes = proto.RepeatedField(proto.MESSAGE, number=12, message=Node,) + + create_time = proto.Field(proto.MESSAGE, number=13, message=timestamp.Timestamp,) + + update_time = proto.Field(proto.MESSAGE, number=14, message=timestamp.Timestamp,) + + state = proto.Field(proto.ENUM, number=15, enum=State,) + memcache_full_version = proto.Field(proto.STRING, number=18) + instance_messages = proto.RepeatedField( - proto.MESSAGE, number=19, message=InstanceMessage + proto.MESSAGE, number=19, message=InstanceMessage, ) + discovery_endpoint = proto.Field(proto.STRING, number=20) @@ -264,9 +286,13 @@ class ListInstancesRequest(proto.Message): """ parent = proto.Field(proto.STRING, number=1) + page_size = proto.Field(proto.INT32, number=2) + page_token = proto.Field(proto.STRING, number=3) + filter = proto.Field(proto.STRING, number=4) + order_by = proto.Field(proto.STRING, number=5) @@ -294,8 +320,10 @@ class ListInstancesResponse(proto.Message): def raw_page(self): return self - resources = proto.RepeatedField(proto.MESSAGE, number=1, message=Instance) + resources = proto.RepeatedField(proto.MESSAGE, number=1, message="Instance",) + next_page_token = proto.Field(proto.STRING, number=2) + unreachable = proto.RepeatedField(proto.STRING, number=3) @@ -337,8 +365,10 @@ class CreateInstanceRequest(proto.Message): """ parent = proto.Field(proto.STRING, number=1) + instance_id = proto.Field(proto.STRING, number=2) - resource = proto.Field(proto.MESSAGE, number=3, message=Instance) + + resource = proto.Field(proto.MESSAGE, number=3, message="Instance",) class UpdateInstanceRequest(proto.Message): @@ -355,8 +385,9 @@ class UpdateInstanceRequest(proto.Message): specified in update_mask are updated. 
""" - update_mask = proto.Field(proto.MESSAGE, number=1, message=field_mask.FieldMask) - resource = proto.Field(proto.MESSAGE, number=2, message=Instance) + update_mask = proto.Field(proto.MESSAGE, number=1, message=field_mask.FieldMask,) + + resource = proto.Field(proto.MESSAGE, number=2, message="Instance",) class DeleteInstanceRequest(proto.Message): @@ -394,7 +425,9 @@ class ApplyParametersRequest(proto.Message): """ name = proto.Field(proto.STRING, number=1) + node_ids = proto.RepeatedField(proto.STRING, number=2) + apply_all = proto.Field(proto.BOOL, number=3) @@ -414,8 +447,10 @@ class UpdateParametersRequest(proto.Message): """ name = proto.Field(proto.STRING, number=1) - update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask) - parameters = proto.Field(proto.MESSAGE, number=3, message="MemcacheParameters") + + update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + + parameters = proto.Field(proto.MESSAGE, number=3, message="MemcacheParameters",) class MemcacheParameters(proto.Message): @@ -435,6 +470,7 @@ class MemcacheParameters(proto.Message): """ id = proto.Field(proto.STRING, number=1) + params = proto.MapField(proto.STRING, proto.STRING, number=3) @@ -464,12 +500,18 @@ class OperationMetadata(proto.Message): API version used to start the operation. 
""" - create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp) - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp) + create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) + + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) + target = proto.Field(proto.STRING, number=3) + verb = proto.Field(proto.STRING, number=4) + status_detail = proto.Field(proto.STRING, number=5) + cancel_requested = proto.Field(proto.BOOL, number=6) + api_version = proto.Field(proto.STRING, number=7) @@ -486,7 +528,7 @@ class LocationMetadata(proto.Message): """ available_zones = proto.MapField( - proto.STRING, proto.MESSAGE, number=1, message="ZoneMetadata" + proto.STRING, proto.MESSAGE, number=1, message="ZoneMetadata", ) diff --git a/noxfile.py b/noxfile.py index 8563032..8004482 100644 --- a/noxfile.py +++ b/noxfile.py @@ -23,14 +23,15 @@ import nox -BLACK_VERSION = "black==19.3b0" +BLACK_VERSION = "black==19.10b0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] -if os.path.exists("samples"): - BLACK_PATHS.append("samples") +DEFAULT_PYTHON_VERSION = "3.8" +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): """Run linters. @@ -38,7 +39,9 @@ def lint(session): serious code quality issues. """ session.install("flake8", BLACK_VERSION) - session.run("black", "--check", *BLACK_PATHS) + session.run( + "black", "--check", *BLACK_PATHS, + ) session.run("flake8", "google", "tests") @@ -53,10 +56,12 @@ def blacken(session): check the state of the `gcp_ubuntu_config` we use for that Kokoro run. 
""" session.install(BLACK_VERSION) - session.run("black", *BLACK_PATHS) + session.run( + "black", *BLACK_PATHS, + ) -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.install("docutils", "pygments") @@ -65,16 +70,19 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. - session.install("mock", "pytest", "pytest-cov") + session.install("asyncmock", "pytest-asyncio") + + session.install( + "mock", "pytest", "pytest-cov", + ) session.install("-e", ".") # Run py.test against the unit tests. session.run( "py.test", "--quiet", - "--cov=google.cloud.memcache", - "--cov=google.cloud", - "--cov=tests.unit", + "--cov=google/cloud", + "--cov=tests/unit", "--cov-append", "--cov-config=.coveragerc", "--cov-report=", @@ -84,17 +92,21 @@ def default(session): ) -@nox.session(python=["3.6", "3.7", "3.8"]) +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) def unit(session): """Run the unit test suite.""" default(session) -@nox.session(python=["3.7"]) +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") # Sanity check: Only run tests if the environment variable is set. if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): session.skip("Credentials must be set via environment variable") @@ -110,7 +122,9 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. 
- session.install("mock", "pytest", "google-cloud-testutils") + session.install( + "mock", "pytest", "google-cloud-testutils", + ) session.install("-e", ".") # Run py.test against the system tests. @@ -120,7 +134,7 @@ def system(session): session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): """Run the final coverage report. @@ -133,7 +147,7 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def docs(session): """Build the docs for this library.""" @@ -153,3 +167,38 @@ def docs(session): os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + # sphinx-docfx-yaml supports up to sphinx version 1.5.5. + # https://github.com/docascode/sphinx-docfx-yaml/issues/97 + session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh new file mode 100755 index 0000000..21f6d2a --- /dev/null +++ b/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2015 Google Inc. All rights reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/scripts/fixup_memcache_v1beta2_keywords.py b/scripts/fixup_memcache_v1beta2_keywords.py new file mode 100644 index 0000000..459ba63 --- /dev/null +++ b/scripts/fixup_memcache_v1beta2_keywords.py @@ -0,0 +1,185 @@ +#! 
/usr/bin/env python3 +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class memcacheCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'apply_parameters': ('name', 'node_ids', 'apply_all', ), + 'create_instance': ('parent', 'instance_id', 'resource', ), + 'delete_instance': ('name', ), + 'get_instance': ('name', ), + 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'update_instance': ('update_mask', 'resource', ), + 'update_parameters': ('name', 'update_mask', 'parameters', ), + + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. 
+ return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), + cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=memcacheCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. 
+ updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the memcache client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py new file mode 100644 
index 0000000..d309d6e --- /dev/null +++ b/scripts/readme-gen/readme_gen.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python + +# Copyright 2016 Google Inc +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Generates READMEs using configuration defined in yaml.""" + +import argparse +import io +import os +import subprocess + +import jinja2 +import yaml + + +jinja_env = jinja2.Environment( + trim_blocks=True, + loader=jinja2.FileSystemLoader( + os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates')))) + +README_TMPL = jinja_env.get_template('README.tmpl.rst') + + +def get_help(file): + return subprocess.check_output(['python', file, '--help']).decode() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('source') + parser.add_argument('--destination', default='README.rst') + + args = parser.parse_args() + + source = os.path.abspath(args.source) + root = os.path.dirname(source) + destination = os.path.join(root, args.destination) + + jinja_env.globals['get_help'] = get_help + + with io.open(source, 'r') as f: + config = yaml.load(f) + + # This allows get_help to execute in the right directory. 
+ os.chdir(root) + + output = README_TMPL.render(config) + + with io.open(destination, 'w') as f: + f.write(output) + + +if __name__ == '__main__': + main() diff --git a/scripts/readme-gen/templates/README.tmpl.rst b/scripts/readme-gen/templates/README.tmpl.rst new file mode 100644 index 0000000..4fd2397 --- /dev/null +++ b/scripts/readme-gen/templates/README.tmpl.rst @@ -0,0 +1,87 @@ +{# The following line is a lie. BUT! Once jinja2 is done with it, it will + become truth! #} +.. This file is automatically generated. Do not edit this file directly. + +{{product.name}} Python Samples +=============================================================================== + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst + + +This directory contains samples for {{product.name}}. {{product.description}} + +{{description}} + +.. _{{product.name}}: {{product.url}} + +{% if required_api_url %} +To run the sample, you need to enable the API at: {{required_api_url}} +{% endif %} + +{% if required_role %} +To run the sample, you need to have `{{required_role}}` role. +{% endif %} + +{{other_required_steps}} + +{% if setup %} +Setup +------------------------------------------------------------------------------- + +{% for section in setup %} + +{% include section + '.tmpl.rst' %} + +{% endfor %} +{% endif %} + +{% if samples %} +Samples +------------------------------------------------------------------------------- + +{% for sample in samples %} +{{sample.name}} ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +{% if not sample.hide_cloudshell_button %} +.. 
image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst +{% endif %} + + +{{sample.description}} + +To run this sample: + +.. code-block:: bash + + $ python {{sample.file}} +{% if sample.show_help %} + + {{get_help(sample.file)|indent}} +{% endif %} + + +{% endfor %} +{% endif %} + +{% if cloud_client_library %} + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. _Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. _browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. _report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + +{% endif %} + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/scripts/readme-gen/templates/auth.tmpl.rst b/scripts/readme-gen/templates/auth.tmpl.rst new file mode 100644 index 0000000..1446b94 --- /dev/null +++ b/scripts/readme-gen/templates/auth.tmpl.rst @@ -0,0 +1,9 @@ +Authentication +++++++++++++++ + +This sample requires you to have authentication setup. Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. + +.. 
_Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started diff --git a/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/scripts/readme-gen/templates/auth_api_key.tmpl.rst new file mode 100644 index 0000000..11957ce --- /dev/null +++ b/scripts/readme-gen/templates/auth_api_key.tmpl.rst @@ -0,0 +1,14 @@ +Authentication +++++++++++++++ + +Authentication for this service is done via an `API Key`_. To obtain an API +Key: + +1. Open the `Cloud Platform Console`_ +2. Make sure that billing is enabled for your project. +3. From the **Credentials** page, create a new **API Key** or use an existing + one for your project. + +.. _API Key: + https://developers.google.com/api-client-library/python/guide/aaa_apikeys +.. _Cloud Platform Console: https://console.cloud.google.com/project?_ diff --git a/scripts/readme-gen/templates/install_deps.tmpl.rst b/scripts/readme-gen/templates/install_deps.tmpl.rst new file mode 100644 index 0000000..a0406db --- /dev/null +++ b/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -0,0 +1,29 @@ +Install Dependencies +++++++++++++++++++++ + +#. Clone python-docs-samples and change directory to the sample directory you want to use. + + .. code-block:: bash + + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. + + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup + +#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. 
_virtualenv: https://virtualenv.pypa.io/ diff --git a/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/scripts/readme-gen/templates/install_portaudio.tmpl.rst new file mode 100644 index 0000000..5ea33d1 --- /dev/null +++ b/scripts/readme-gen/templates/install_portaudio.tmpl.rst @@ -0,0 +1,35 @@ +Install PortAudio ++++++++++++++++++ + +Install `PortAudio`_. This is required by the `PyAudio`_ library to stream +audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the +platform. + +* For Mac OS X, you can use `Homebrew`_:: + + brew install portaudio + + **Note**: if you encounter an error when running `pip install` that indicates + it can't find `portaudio.h`, try running `pip install` with the following + flags:: + + pip install --global-option='build_ext' \ + --global-option='-I/usr/local/include' \ + --global-option='-L/usr/local/lib' \ + pyaudio + +* For Debian / Ubuntu Linux:: + + apt-get install portaudio19-dev python-all-dev + +* Windows may work without having to install PortAudio explicitly (it will get + installed with PyAudio). + +For more details, see the `PyAudio installation`_ page. + + +.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/ +.. _PortAudio: http://www.portaudio.com/ +.. _PyAudio installation: + https://people.csail.mit.edu/hubert/pyaudio/#downloads +.. 
_Homebrew: http://brew.sh diff --git a/setup.py b/setup.py index 31d56ba..bae28ee 100644 --- a/setup.py +++ b/setup.py @@ -40,8 +40,8 @@ platforms="Posix; MacOS X; Windows", include_package_data=True, install_requires=( - "google-api-core[grpc] >= 1.17.0, < 2.0.0dev", - "proto-plus >= 0.4.0", + "google-api-core[grpc] >= 1.22.2, < 2.0.0dev", + "proto-plus >= 1.4.0", ), python_requires=">=3.6", setup_requires=["libcst >= 0.2.5"], diff --git a/synth.metadata b/synth.metadata index 55c0f24..b83959d 100644 --- a/synth.metadata +++ b/synth.metadata @@ -3,23 +3,22 @@ { "git": { "name": ".", - "remote": "https://github.com/googleapis/python-memcache.git", - "sha": "e34e2b1a2b47476cb6a0dcd932dcfd030018936f" + "remote": "git@github.com:googleapis/python-memcache", + "sha": "b068bfca843c0d792bb2b79f5b6b28fcc80ae7c8" } }, { "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "eafa840ceec23b44a5c21670288107c661252711", - "internalRef": "313488995" + "name": "synthtool", + "remote": "https://github.com/googleapis/synthtool.git", + "sha": "6b026e1443948dcfc0b9e3289c85e940eb70f694" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "71b8a272549c06b5768d00fa48d3ae990e871bec" + "sha": "6b026e1443948dcfc0b9e3289c85e940eb70f694" } } ], @@ -30,7 +29,7 @@ "apiName": "memcache", "apiVersion": "v1beta2", "language": "python", - "generator": "gapic-generator-python" + "generator": "bazel" } } ] diff --git a/synth.py b/synth.py index 619b015..4b2ffa1 100644 --- a/synth.py +++ b/synth.py @@ -19,53 +19,38 @@ import synthtool.gcp as gcp from synthtool.languages import python -gapic = gcp.GAPICMicrogenerator() +gapic = gcp.GAPICBazel() common = gcp.CommonTemplates() +versions = ["v1beta2"] + # ---------------------------------------------------------------------------- # Generate memcache GAPIC layer # ---------------------------------------------------------------------------- -library = 
gapic.py_library("memcache", "v1beta2") - -# TODO: remove /docs/memcache_v1beta2/*.rst files after fix is released in -# gapic-generator-python 0.19.0 -excludes = [ - "setup.py", - "docs/index.rst", - "docs/memcache_v1beta2/services.rst", - "docs/memcache_v1beta2/types.rst", -] -s.move(library, excludes=excludes) +for version in versions: + library = gapic.py_library( + service="memcache", + version=version, + bazel_target=f"//google/cloud/memcache/{version}:memcache-{version}-py") + + excludes = [ + "setup.py", + "docs/index.rst", + ] + s.move(library, excludes=excludes) # Add extra linebreak after bulleted list to appease sphinx -s.replace("google/**/client.py", """- Must be unique within the user project / location""", +s.replace("google/**/*client.py", """- Must be unique within the user project / location""", """- Must be unique within the user project / location\n""") -s.replace("google/**/client.py", "- ``displayName``", "- ``displayName``\n") +s.replace("google/**/*client.py", "- ``displayName``", "- ``displayName``\n") -# correct license headers -python.fix_pb2_headers() -python.fix_pb2_grpc_headers() # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -templated_files = common.py_library(cov_level=100) +templated_files = common.py_library(cov_level=100, microgenerator=True) s.move( templated_files, excludes=[".coveragerc"] ) # the microgenerator has a good coveragerc file -s.replace( - ".gitignore", "bigquery/docs/generated", "htmlcov" -) # temporary hack to ignore htmlcov - -# Remove 2.7 and 3.5 tests from noxfile.py -s.replace("noxfile.py", """\["2\.7", """, "[") -s.replace("noxfile.py", """"3.5", """, "") - -# Expand flake errors permitted to accomodate the Microgenerator -# TODO: remove extra error codes once issues below are resolved -# E712: https://github.com/googleapis/gapic-generator-python/issues/322 -# F401: 
https://github.com/googleapis/gapic-generator-python/issues/324 -# F841: https://github.com/googleapis/gapic-generator-python/issues/323 -s.replace(".flake8", "ignore = .*", "ignore = E203, E266, E501, W503, F401, F841, E712") s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/testing/.gitignore b/testing/.gitignore new file mode 100644 index 0000000..b05fbd6 --- /dev/null +++ b/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/tests/unit/gapic/memcache_v1beta2/__init__.py b/tests/unit/gapic/memcache_v1beta2/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/tests/unit/gapic/memcache_v1beta2/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py new file mode 100644 index 0000000..3050c18 --- /dev/null +++ b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -0,0 +1,2529 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + +from google import auth +from google.api_core import client_options +from google.api_core import exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.auth import credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.memcache_v1beta2.services.cloud_memcache import ( + CloudMemcacheAsyncClient, +) +from google.cloud.memcache_v1beta2.services.cloud_memcache import CloudMemcacheClient +from google.cloud.memcache_v1beta2.services.cloud_memcache import pagers +from google.cloud.memcache_v1beta2.services.cloud_memcache import transports +from google.cloud.memcache_v1beta2.types import cloud_memcache +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert CloudMemcacheClient._get_default_mtls_endpoint(None) is None + assert ( + CloudMemcacheClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + CloudMemcacheClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + CloudMemcacheClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CloudMemcacheClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CloudMemcacheClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient] +) +def test_cloud_memcache_client_from_service_account_file(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + + assert client.transport._host == "memcache.googleapis.com:443" + + +def test_cloud_memcache_client_get_transport_class(): + transport = CloudMemcacheClient.get_transport_class() + assert transport == transports.CloudMemcacheGrpcTransport + + transport = CloudMemcacheClient.get_transport_class("grpc") + assert transport == 
transports.CloudMemcacheGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudMemcacheClient, transports.CloudMemcacheGrpcTransport, "grpc"), + ( + CloudMemcacheAsyncClient, + transports.CloudMemcacheGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + CloudMemcacheClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudMemcacheClient), +) +@mock.patch.object( + CloudMemcacheAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudMemcacheAsyncClient), +) +def test_cloud_memcache_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(CloudMemcacheClient, "get_transport_class") as gtc: + transport = transport_class(credentials=credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(CloudMemcacheClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (CloudMemcacheClient, transports.CloudMemcacheGrpcTransport, "grpc", "true"), + ( + CloudMemcacheAsyncClient, + transports.CloudMemcacheGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (CloudMemcacheClient, transports.CloudMemcacheGrpcTransport, "grpc", "false"), + ( + CloudMemcacheAsyncClient, + transports.CloudMemcacheGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + CloudMemcacheClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudMemcacheClient), +) +@mock.patch.object( + CloudMemcacheAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudMemcacheAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_cloud_memcache_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + ssl_channel_creds = mock.Mock() + with mock.patch( + "grpc.ssl_channel_credentials", return_value=ssl_channel_creds + ): + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_ssl_channel_creds = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_ssl_channel_creds = ssl_channel_creds + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + ): + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.ssl_credentials", + new_callable=mock.PropertyMock, + ) as ssl_credentials_mock: + if use_client_cert_env == "false": + is_mtls_mock.return_value = False + ssl_credentials_mock.return_value = None + expected_host = client.DEFAULT_ENDPOINT + expected_ssl_channel_creds = None + else: + is_mtls_mock.return_value = True + ssl_credentials_mock.return_value = mock.Mock() + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_ssl_channel_creds = ( + ssl_credentials_mock.return_value + ) + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + ): + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + is_mtls_mock.return_value = False + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudMemcacheClient, transports.CloudMemcacheGrpcTransport, "grpc"), + ( + CloudMemcacheAsyncClient, + transports.CloudMemcacheGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_cloud_memcache_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudMemcacheClient, transports.CloudMemcacheGrpcTransport, "grpc"), + ( + CloudMemcacheAsyncClient, + transports.CloudMemcacheGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_cloud_memcache_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_cloud_memcache_client_client_options_from_dict(): + with mock.patch( + "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = CloudMemcacheClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_list_instances( + transport: str = "grpc", 
request_type=cloud_memcache.ListInstancesRequest +): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_memcache.ListInstancesResponse( + next_page_token="next_page_token_value", unreachable=["unreachable_value"], + ) + + response = client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.ListInstancesRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, pagers.ListInstancesPager) + + assert response.next_page_token == "next_page_token_value" + + assert response.unreachable == ["unreachable_value"] + + +def test_list_instances_from_dict(): + test_list_instances(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_instances_async( + transport: str = "grpc_asyncio", request_type=cloud_memcache.ListInstancesRequest +): + client = CloudMemcacheAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_memcache.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + + response = await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.ListInstancesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_instances_async_from_dict(): + await test_list_instances_async(request_type=dict) + + +def test_list_instances_field_headers(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.ListInstancesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value = cloud_memcache.ListInstancesResponse() + + client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_instances_field_headers_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = cloud_memcache.ListInstancesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_memcache.ListInstancesResponse() + ) + + await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_instances_flattened(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_memcache.ListInstancesResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_instances(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +def test_list_instances_flattened_error(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_instances( + cloud_memcache.ListInstancesRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_instances_flattened_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_memcache.ListInstancesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_memcache.ListInstancesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_instances(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_instances_flattened_error_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_instances( + cloud_memcache.ListInstancesRequest(), parent="parent_value", + ) + + +def test_list_instances_pager(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_memcache.ListInstancesResponse( + resources=[ + cloud_memcache.Instance(), + cloud_memcache.Instance(), + cloud_memcache.Instance(), + ], + next_page_token="abc", + ), + cloud_memcache.ListInstancesResponse(resources=[], next_page_token="def",), + cloud_memcache.ListInstancesResponse( + resources=[cloud_memcache.Instance(),], next_page_token="ghi", + ), + cloud_memcache.ListInstancesResponse( + resources=[cloud_memcache.Instance(), cloud_memcache.Instance(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_instances(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, cloud_memcache.Instance) for i in results) + + +def test_list_instances_pages(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_memcache.ListInstancesResponse( + resources=[ + cloud_memcache.Instance(), + cloud_memcache.Instance(), + cloud_memcache.Instance(), + ], + next_page_token="abc", + ), + cloud_memcache.ListInstancesResponse(resources=[], next_page_token="def",), + cloud_memcache.ListInstancesResponse( + resources=[cloud_memcache.Instance(),], next_page_token="ghi", + ), + cloud_memcache.ListInstancesResponse( + resources=[cloud_memcache.Instance(), cloud_memcache.Instance(),], + ), + RuntimeError, + ) + pages = list(client.list_instances(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_instances_async_pager(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_memcache.ListInstancesResponse( + resources=[ + cloud_memcache.Instance(), + cloud_memcache.Instance(), + cloud_memcache.Instance(), + ], + next_page_token="abc", + ), + cloud_memcache.ListInstancesResponse(resources=[], next_page_token="def",), + cloud_memcache.ListInstancesResponse( + resources=[cloud_memcache.Instance(),], next_page_token="ghi", + ), + cloud_memcache.ListInstancesResponse( + resources=[cloud_memcache.Instance(), cloud_memcache.Instance(),], + ), + RuntimeError, + ) + async_pager = await client.list_instances(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, cloud_memcache.Instance) for i in responses) + + +@pytest.mark.asyncio +async def test_list_instances_async_pages(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_memcache.ListInstancesResponse( + resources=[ + cloud_memcache.Instance(), + cloud_memcache.Instance(), + cloud_memcache.Instance(), + ], + next_page_token="abc", + ), + cloud_memcache.ListInstancesResponse(resources=[], next_page_token="def",), + cloud_memcache.ListInstancesResponse( + resources=[cloud_memcache.Instance(),], next_page_token="ghi", + ), + cloud_memcache.ListInstancesResponse( + resources=[cloud_memcache.Instance(), cloud_memcache.Instance(),], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_instances(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_instance( + transport: str = "grpc", request_type=cloud_memcache.GetInstanceRequest +): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_memcache.Instance( + name="name_value", + display_name="display_name_value", + authorized_network="authorized_network_value", + zones=["zones_value"], + node_count=1070, + memcache_version=cloud_memcache.MemcacheVersion.MEMCACHE_1_5, + state=cloud_memcache.Instance.State.CREATING, + memcache_full_version="memcache_full_version_value", + discovery_endpoint="discovery_endpoint_value", + ) + + response = client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.GetInstanceRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, cloud_memcache.Instance) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.authorized_network == "authorized_network_value" + + assert response.zones == ["zones_value"] + + assert response.node_count == 1070 + + assert response.memcache_version == cloud_memcache.MemcacheVersion.MEMCACHE_1_5 + + assert response.state == cloud_memcache.Instance.State.CREATING + + assert response.memcache_full_version == "memcache_full_version_value" + + assert response.discovery_endpoint == "discovery_endpoint_value" + + +def test_get_instance_from_dict(): + test_get_instance(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_memcache.GetInstanceRequest +): + client = CloudMemcacheAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_memcache.Instance( + name="name_value", + display_name="display_name_value", + authorized_network="authorized_network_value", + zones=["zones_value"], + node_count=1070, + memcache_version=cloud_memcache.MemcacheVersion.MEMCACHE_1_5, + state=cloud_memcache.Instance.State.CREATING, + memcache_full_version="memcache_full_version_value", + discovery_endpoint="discovery_endpoint_value", + ) + ) + + response = await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.GetInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_memcache.Instance) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.authorized_network == "authorized_network_value" + + assert response.zones == ["zones_value"] + + assert response.node_count == 1070 + + assert response.memcache_version == cloud_memcache.MemcacheVersion.MEMCACHE_1_5 + + assert response.state == cloud_memcache.Instance.State.CREATING + + assert response.memcache_full_version == "memcache_full_version_value" + + assert response.discovery_endpoint == "discovery_endpoint_value" + + +@pytest.mark.asyncio +async def test_get_instance_async_from_dict(): + await test_get_instance_async(request_type=dict) + + +def test_get_instance_field_headers(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.GetInstanceRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value = cloud_memcache.Instance() + + client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_instance_field_headers_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.GetInstanceRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_memcache.Instance() + ) + + await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_instance_flattened(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_memcache.Instance() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.get_instance(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_instance_flattened_error(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_instance( + cloud_memcache.GetInstanceRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_instance_flattened_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_memcache.Instance() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_memcache.Instance() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_instance(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_instance_flattened_error_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_instance( + cloud_memcache.GetInstanceRequest(), name="name_value", + ) + + +def test_create_instance( + transport: str = "grpc", request_type=cloud_memcache.CreateInstanceRequest +): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + + response = client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.CreateInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_instance_from_dict(): + test_create_instance(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_memcache.CreateInstanceRequest +): + client = CloudMemcacheAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + + response = await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.CreateInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_instance_async_from_dict(): + await test_create_instance_async(request_type=dict) + + +def test_create_instance_field_headers(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.CreateInstanceRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + + client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_instance_field_headers_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.CreateInstanceRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + + await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_instance_flattened(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_instance( + parent="parent_value", + instance_id="instance_id_value", + resource=cloud_memcache.Instance(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].instance_id == "instance_id_value" + + assert args[0].resource == cloud_memcache.Instance(name="name_value") + + +def test_create_instance_flattened_error(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_instance( + cloud_memcache.CreateInstanceRequest(), + parent="parent_value", + instance_id="instance_id_value", + resource=cloud_memcache.Instance(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_instance_flattened_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_instance( + parent="parent_value", + instance_id="instance_id_value", + resource=cloud_memcache.Instance(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].instance_id == "instance_id_value" + + assert args[0].resource == cloud_memcache.Instance(name="name_value") + + +@pytest.mark.asyncio +async def test_create_instance_flattened_error_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_instance( + cloud_memcache.CreateInstanceRequest(), + parent="parent_value", + instance_id="instance_id_value", + resource=cloud_memcache.Instance(name="name_value"), + ) + + +def test_update_instance( + transport: str = "grpc", request_type=cloud_memcache.UpdateInstanceRequest +): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + + response = client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.UpdateInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_instance_from_dict(): + test_update_instance(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateInstanceRequest +): + client = CloudMemcacheAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + + response = await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.UpdateInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_instance_async_from_dict(): + await test_update_instance_async(request_type=dict) + + +def test_update_instance_field_headers(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.UpdateInstanceRequest() + request.resource.name = "resource.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource.name=resource.name/value",) in kw[ + "metadata" + ] + + +@pytest.mark.asyncio +async def test_update_instance_field_headers_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.UpdateInstanceRequest() + request.resource.name = "resource.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + + await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource.name=resource.name/value",) in kw[ + "metadata" + ] + + +def test_update_instance_flattened(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_instance( + update_mask=field_mask.FieldMask(paths=["paths_value"]), + resource=cloud_memcache.Instance(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + assert args[0].resource == cloud_memcache.Instance(name="name_value") + + +def test_update_instance_flattened_error(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_instance( + cloud_memcache.UpdateInstanceRequest(), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + resource=cloud_memcache.Instance(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_update_instance_flattened_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_instance( + update_mask=field_mask.FieldMask(paths=["paths_value"]), + resource=cloud_memcache.Instance(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + assert args[0].resource == cloud_memcache.Instance(name="name_value") + + +@pytest.mark.asyncio +async def test_update_instance_flattened_error_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_instance( + cloud_memcache.UpdateInstanceRequest(), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + resource=cloud_memcache.Instance(name="name_value"), + ) + + +def test_update_parameters( + transport: str = "grpc", request_type=cloud_memcache.UpdateParametersRequest +): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_parameters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + + response = client.update_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.UpdateParametersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_parameters_from_dict(): + test_update_parameters(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_parameters_async( + transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateParametersRequest +): + client = CloudMemcacheAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_parameters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + + response = await client.update_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.UpdateParametersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_parameters_async_from_dict(): + await test_update_parameters_async(request_type=dict) + + +def test_update_parameters_field_headers(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.UpdateParametersRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_parameters), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + + client.update_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_parameters_field_headers_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = cloud_memcache.UpdateParametersRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_parameters), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + + await client.update_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_update_parameters_flattened(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_parameters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_parameters( + name="name_value", + update_mask=field_mask.FieldMask(paths=["paths_value"]), + parameters=cloud_memcache.MemcacheParameters(id="id_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + assert args[0].parameters == cloud_memcache.MemcacheParameters(id="id_value") + + +def test_update_parameters_flattened_error(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_parameters( + cloud_memcache.UpdateParametersRequest(), + name="name_value", + update_mask=field_mask.FieldMask(paths=["paths_value"]), + parameters=cloud_memcache.MemcacheParameters(id="id_value"), + ) + + +@pytest.mark.asyncio +async def test_update_parameters_flattened_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_parameters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_parameters( + name="name_value", + update_mask=field_mask.FieldMask(paths=["paths_value"]), + parameters=cloud_memcache.MemcacheParameters(id="id_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + assert args[0].parameters == cloud_memcache.MemcacheParameters(id="id_value") + + +@pytest.mark.asyncio +async def test_update_parameters_flattened_error_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_parameters( + cloud_memcache.UpdateParametersRequest(), + name="name_value", + update_mask=field_mask.FieldMask(paths=["paths_value"]), + parameters=cloud_memcache.MemcacheParameters(id="id_value"), + ) + + +def test_delete_instance( + transport: str = "grpc", request_type=cloud_memcache.DeleteInstanceRequest +): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + + response = client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.DeleteInstanceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_delete_instance_from_dict(): + test_delete_instance(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_memcache.DeleteInstanceRequest +): + client = CloudMemcacheAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + + response = await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.DeleteInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_instance_async_from_dict(): + await test_delete_instance_async(request_type=dict) + + +def test_delete_instance_field_headers(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.DeleteInstanceRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + + client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_instance_field_headers_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.DeleteInstanceRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + + await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_instance_flattened(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_instance(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_delete_instance_flattened_error(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_instance( + cloud_memcache.DeleteInstanceRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_instance_flattened_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_instance(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_instance_flattened_error_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_instance( + cloud_memcache.DeleteInstanceRequest(), name="name_value", + ) + + +def test_apply_parameters( + transport: str = "grpc", request_type=cloud_memcache.ApplyParametersRequest +): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + + response = client.apply_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.ApplyParametersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_apply_parameters_from_dict(): + test_apply_parameters(request_type=dict) + + +@pytest.mark.asyncio +async def test_apply_parameters_async( + transport: str = "grpc_asyncio", request_type=cloud_memcache.ApplyParametersRequest +): + client = CloudMemcacheAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + + response = await client.apply_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.ApplyParametersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_apply_parameters_async_from_dict(): + await test_apply_parameters_async(request_type=dict) + + +def test_apply_parameters_field_headers(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.ApplyParametersRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + + client.apply_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_apply_parameters_field_headers_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.ApplyParametersRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + + await client.apply_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_apply_parameters_flattened(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.apply_parameters( + name="name_value", node_ids=["node_ids_value"], apply_all=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].node_ids == ["node_ids_value"] + + assert args[0].apply_all == True + + +def test_apply_parameters_flattened_error(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.apply_parameters( + cloud_memcache.ApplyParametersRequest(), + name="name_value", + node_ids=["node_ids_value"], + apply_all=True, + ) + + +@pytest.mark.asyncio +async def test_apply_parameters_flattened_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.apply_parameters( + name="name_value", node_ids=["node_ids_value"], apply_all=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].node_ids == ["node_ids_value"] + + assert args[0].apply_all == True + + +@pytest.mark.asyncio +async def test_apply_parameters_flattened_error_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.apply_parameters( + cloud_memcache.ApplyParametersRequest(), + name="name_value", + node_ids=["node_ids_value"], + apply_all=True, + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.CloudMemcacheGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CloudMemcacheGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudMemcacheClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CloudMemcacheGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudMemcacheClient( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudMemcacheGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + client = CloudMemcacheClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudMemcacheGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CloudMemcacheGrpcAsyncIOTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudMemcacheGrpcTransport, + transports.CloudMemcacheGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client.transport, transports.CloudMemcacheGrpcTransport,) + + +def test_cloud_memcache_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(exceptions.DuplicateCredentialArgs): + transport = transports.CloudMemcacheTransport( + credentials=credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_cloud_memcache_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.CloudMemcacheTransport( + credentials=credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "list_instances", + "get_instance", + "create_instance", + "update_instance", + "update_parameters", + "delete_instance", + "apply_parameters", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + +def test_cloud_memcache_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.CloudMemcacheTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_cloud_memcache_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(auth, "default") as adc, mock.patch( + "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.CloudMemcacheTransport() + adc.assert_called_once() + + +def test_cloud_memcache_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + CloudMemcacheClient() + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +def test_cloud_memcache_transport_auth_adc(): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.CloudMemcacheGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_cloud_memcache_host_no_port(): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="memcache.googleapis.com" + ), + ) + assert client.transport._host == "memcache.googleapis.com:443" + + +def test_cloud_memcache_host_with_port(): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="memcache.googleapis.com:8000" + ), + ) + assert client.transport._host == "memcache.googleapis.com:8000" + + +def test_cloud_memcache_grpc_transport_channel(): + channel = grpc.insecure_channel("http://localhost/") + + # Check that channel is used if provided. + transport = transports.CloudMemcacheGrpcTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_cloud_memcache_grpc_asyncio_transport_channel(): + channel = aio.insecure_channel("http://localhost/") + + # Check that channel is used if provided. 
+ transport = transports.CloudMemcacheGrpcAsyncIOTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudMemcacheGrpcTransport, + transports.CloudMemcacheGrpcAsyncIOTransport, + ], +) +def test_cloud_memcache_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel", autospec=True + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudMemcacheGrpcTransport, + transports.CloudMemcacheGrpcAsyncIOTransport, + ], +) +def 
 test_cloud_memcache_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel", autospec=True
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=("https://www.googleapis.com/auth/cloud-platform",),
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_cloud_memcache_grpc_lro_client():
+    client = CloudMemcacheClient(
+        credentials=credentials.AnonymousCredentials(), transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(transport.operations_client, operations_v1.OperationsClient,)
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_cloud_memcache_grpc_lro_async_client():
+    client = CloudMemcacheAsyncClient(
+        credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+ assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_instance_path(): + project = "squid" + location = "clam" + instance = "whelk" + + expected = "projects/{project}/locations/{location}/instances/{instance}".format( + project=project, location=location, instance=instance, + ) + actual = CloudMemcacheClient.instance_path(project, location, instance) + assert expected == actual + + +def test_parse_instance_path(): + expected = { + "project": "octopus", + "location": "oyster", + "instance": "nudibranch", + } + path = CloudMemcacheClient.instance_path(**expected) + + # Check that the path construction is reversible. + actual = CloudMemcacheClient.parse_instance_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = CloudMemcacheClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = CloudMemcacheClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = CloudMemcacheClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + + expected = "folders/{folder}".format(folder=folder,) + actual = CloudMemcacheClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = CloudMemcacheClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudMemcacheClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + + expected = "organizations/{organization}".format(organization=organization,) + actual = CloudMemcacheClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = CloudMemcacheClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = CloudMemcacheClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + + expected = "projects/{project}".format(project=project,) + actual = CloudMemcacheClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = CloudMemcacheClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = CloudMemcacheClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = CloudMemcacheClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = CloudMemcacheClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudMemcacheClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.CloudMemcacheTransport, "_prep_wrapped_messages" + ) as prep: + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.CloudMemcacheTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = CloudMemcacheClient.get_transport_class() + transport = transport_class( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/tests/unit/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/memcache_v1beta2/test_cloud_memcache.py deleted file mode 100644 index b7e87f4..0000000 --- a/tests/unit/memcache_v1beta2/test_cloud_memcache.py +++ /dev/null @@ -1,929 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from unittest import mock - -import grpc -import math -import pytest - -from google import auth -from google.api_core import client_options -from google.api_core import future -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.auth import credentials -from google.cloud.memcache_v1beta2.services.cloud_memcache import CloudMemcacheClient -from google.cloud.memcache_v1beta2.services.cloud_memcache import pagers -from google.cloud.memcache_v1beta2.services.cloud_memcache import transports -from google.cloud.memcache_v1beta2.types import cloud_memcache -from google.longrunning import operations_pb2 -from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert CloudMemcacheClient._get_default_mtls_endpoint(None) is None - assert ( - CloudMemcacheClient._get_default_mtls_endpoint(api_endpoint) - == api_mtls_endpoint - ) - assert ( - CloudMemcacheClient._get_default_mtls_endpoint(api_mtls_endpoint) - == api_mtls_endpoint - ) - assert ( - CloudMemcacheClient._get_default_mtls_endpoint(sandbox_endpoint) - == sandbox_mtls_endpoint - ) - assert ( - CloudMemcacheClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) - == sandbox_mtls_endpoint - ) - assert ( - CloudMemcacheClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - ) - - -def test_cloud_memcache_client_from_service_account_file(): - creds = credentials.AnonymousCredentials() - with mock.patch.object( - service_account.Credentials, 
"from_service_account_file" - ) as factory: - factory.return_value = creds - client = CloudMemcacheClient.from_service_account_file("dummy/file/path.json") - assert client._transport._credentials == creds - - client = CloudMemcacheClient.from_service_account_json("dummy/file/path.json") - assert client._transport._credentials == creds - - assert client._transport._host == "memcache.googleapis.com:443" - - -def test_cloud_memcache_client_client_options(): - # Check that if channel is provided we won't create a new one. - with mock.patch( - "google.cloud.memcache_v1beta2.services.cloud_memcache.CloudMemcacheClient.get_transport_class" - ) as gtc: - transport = transports.CloudMemcacheGrpcTransport( - credentials=credentials.AnonymousCredentials() - ) - client = CloudMemcacheClient(transport=transport) - gtc.assert_not_called() - - # Check mTLS is not triggered with empty client options. - options = client_options.ClientOptions() - with mock.patch( - "google.cloud.memcache_v1beta2.services.cloud_memcache.CloudMemcacheClient.get_transport_class" - ) as gtc: - transport = gtc.return_value = mock.MagicMock() - client = CloudMemcacheClient(client_options=options) - transport.assert_called_once_with( - credentials=None, host=client.DEFAULT_ENDPOINT - ) - - # Check mTLS is not triggered if api_endpoint is provided but - # client_cert_source is None. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch( - "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = CloudMemcacheClient(client_options=options) - grpc_transport.assert_called_once_with( - api_mtls_endpoint=None, - client_cert_source=None, - credentials=None, - host="squid.clam.whelk", - ) - - # Check mTLS is triggered if client_cert_source is provided. 
- options = client_options.ClientOptions( - client_cert_source=client_cert_source_callback - ) - with mock.patch( - "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = CloudMemcacheClient(client_options=options) - grpc_transport.assert_called_once_with( - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=client_cert_source_callback, - credentials=None, - host=client.DEFAULT_ENDPOINT, - ) - - # Check mTLS is triggered if api_endpoint and client_cert_source are provided. - options = client_options.ClientOptions( - api_endpoint="squid.clam.whelk", client_cert_source=client_cert_source_callback - ) - with mock.patch( - "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = CloudMemcacheClient(client_options=options) - grpc_transport.assert_called_once_with( - api_mtls_endpoint="squid.clam.whelk", - client_cert_source=client_cert_source_callback, - credentials=None, - host="squid.clam.whelk", - ) - - -def test_cloud_memcache_client_client_options_from_dict(): - with mock.patch( - "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = CloudMemcacheClient( - client_options={"api_endpoint": "squid.clam.whelk"} - ) - grpc_transport.assert_called_once_with( - api_mtls_endpoint=None, - client_cert_source=None, - credentials=None, - host="squid.clam.whelk", - ) - - -def test_list_instances(transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = cloud_memcache.ListInstancesRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_instances), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = cloud_memcache.ListInstancesResponse( - next_page_token="next_page_token_value", unreachable=["unreachable_value"] - ) - - response = client.list_instances(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] - - -def test_list_instances_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.ListInstancesRequest(parent="parent/value") - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_instances), "__call__") as call: - call.return_value = cloud_memcache.ListInstancesResponse() - client.list_instances(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value") in kw["metadata"] - - -def test_list_instances_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials()) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.list_instances), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = cloud_memcache.ListInstancesResponse() - - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = client.list_instances(parent="parent_value") - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - - -def test_list_instances_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials()) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_instances( - cloud_memcache.ListInstancesRequest(), parent="parent_value" - ) - - -def test_list_instances_pager(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_instances), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloud_memcache.ListInstancesResponse( - resources=[ - cloud_memcache.Instance(), - cloud_memcache.Instance(), - cloud_memcache.Instance(), - ], - next_page_token="abc", - ), - cloud_memcache.ListInstancesResponse(resources=[], next_page_token="def"), - cloud_memcache.ListInstancesResponse( - resources=[cloud_memcache.Instance()], next_page_token="ghi" - ), - cloud_memcache.ListInstancesResponse( - resources=[cloud_memcache.Instance(), cloud_memcache.Instance()] - ), - RuntimeError, - ) - results = [i for i in client.list_instances(request={})] - assert len(results) == 6 - assert all(isinstance(i, cloud_memcache.Instance) for i in results) - - -def test_list_instances_pages(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_instances), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - cloud_memcache.ListInstancesResponse( - resources=[ - cloud_memcache.Instance(), - cloud_memcache.Instance(), - cloud_memcache.Instance(), - ], - next_page_token="abc", - ), - cloud_memcache.ListInstancesResponse(resources=[], next_page_token="def"), - cloud_memcache.ListInstancesResponse( - resources=[cloud_memcache.Instance()], next_page_token="ghi" - ), - cloud_memcache.ListInstancesResponse( - resources=[cloud_memcache.Instance(), cloud_memcache.Instance()] - ), - RuntimeError, - ) - pages = list(client.list_instances(request={}).pages) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token - - -def test_get_instance(transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = cloud_memcache.GetInstanceRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = cloud_memcache.Instance( - name="name_value", - display_name="display_name_value", - authorized_network="authorized_network_value", - zones=["zones_value"], - node_count=1070, - memcache_version=cloud_memcache.MemcacheVersion.MEMCACHE_1_5, - state=cloud_memcache.Instance.State.CREATING, - memcache_full_version="memcache_full_version_value", - discovery_endpoint="discovery_endpoint_value", - ) - - response = client.get_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, cloud_memcache.Instance) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.authorized_network == "authorized_network_value" - assert response.zones == ["zones_value"] - assert response.node_count == 1070 - assert response.memcache_version == cloud_memcache.MemcacheVersion.MEMCACHE_1_5 - assert response.state == cloud_memcache.Instance.State.CREATING - assert response.memcache_full_version == "memcache_full_version_value" - assert response.discovery_endpoint == "discovery_endpoint_value" - - -def test_get_instance_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.GetInstanceRequest(name="name/value") - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.get_instance), "__call__") as call: - call.return_value = cloud_memcache.Instance() - client.get_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value") in kw["metadata"] - - -def test_get_instance_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials()) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = cloud_memcache.Instance() - - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = client.get_instance(name="name_value") - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - - -def test_get_instance_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials()) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_instance(cloud_memcache.GetInstanceRequest(), name="name_value") - - -def test_create_instance(transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = cloud_memcache.CreateInstanceRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.create_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - - response = client.create_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_instance_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials()) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = client.create_instance( - parent="parent_value", - instance_id="instance_id_value", - resource=cloud_memcache.Instance(name="name_value"), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].instance_id == "instance_id_value" - assert args[0].resource == cloud_memcache.Instance(name="name_value") - - -def test_create_instance_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials()) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_instance( - cloud_memcache.CreateInstanceRequest(), - parent="parent_value", - instance_id="instance_id_value", - resource=cloud_memcache.Instance(name="name_value"), - ) - - -def test_update_instance(transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = cloud_memcache.UpdateInstanceRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - - response = client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_instance_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials()) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = client.update_instance( - update_mask=field_mask.FieldMask(paths=["paths_value"]), - resource=cloud_memcache.Instance(name="name_value"), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) - assert args[0].resource == cloud_memcache.Instance(name="name_value") - - -def test_update_instance_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials()) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_instance( - cloud_memcache.UpdateInstanceRequest(), - update_mask=field_mask.FieldMask(paths=["paths_value"]), - resource=cloud_memcache.Instance(name="name_value"), - ) - - -def test_update_parameters(transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = cloud_memcache.UpdateParametersRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.update_parameters), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - - response = client.update_parameters(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_parameters_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials()) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.update_parameters), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") - - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = client.update_parameters( - name="name_value", - update_mask=field_mask.FieldMask(paths=["paths_value"]), - parameters=cloud_memcache.MemcacheParameters(id="id_value"), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) - assert args[0].parameters == cloud_memcache.MemcacheParameters(id="id_value") - - -def test_update_parameters_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials()) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_parameters( - cloud_memcache.UpdateParametersRequest(), - name="name_value", - update_mask=field_mask.FieldMask(paths=["paths_value"]), - parameters=cloud_memcache.MemcacheParameters(id="id_value"), - ) - - -def test_delete_instance(transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = cloud_memcache.DeleteInstanceRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - - response = client.delete_instance(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_instance_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials()) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = client.delete_instance(name="name_value") - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - - -def test_delete_instance_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials()) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_instance( - cloud_memcache.DeleteInstanceRequest(), name="name_value" - ) - - -def test_apply_parameters(transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = cloud_memcache.ApplyParametersRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.apply_parameters), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/spam") - - response = client.apply_parameters(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_apply_parameters_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials()) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.apply_parameters), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = client.apply_parameters( - name="name_value", node_ids=["node_ids_value"], apply_all=True - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].node_ids == ["node_ids_value"] - assert args[0].apply_all == True - - -def test_apply_parameters_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials()) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.apply_parameters( - cloud_memcache.ApplyParametersRequest(), - name="name_value", - node_ids=["node_ids_value"], - apply_all=True, - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
- transport = transports.CloudMemcacheGrpcTransport( - credentials=credentials.AnonymousCredentials() - ) - with pytest.raises(ValueError): - client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.CloudMemcacheGrpcTransport( - credentials=credentials.AnonymousCredentials() - ) - client = CloudMemcacheClient(transport=transport) - assert client._transport is transport - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials()) - assert isinstance(client._transport, transports.CloudMemcacheGrpcTransport) - - -def test_cloud_memcache_base_transport(): - # Instantiate the base transport. - transport = transports.CloudMemcacheTransport( - credentials=credentials.AnonymousCredentials() - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "list_instances", - "get_instance", - "create_instance", - "update_instance", - "update_parameters", - "delete_instance", - "apply_parameters", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - -def test_cloud_memcache_auth_adc(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - CloudMemcacheClient() - adc.assert_called_once_with( - scopes=("https://www.googleapis.com/auth/cloud-platform",) - ) - - -def test_cloud_memcache_host_no_port(): - client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="memcache.googleapis.com" - ), - transport="grpc", - ) - assert client._transport._host == "memcache.googleapis.com:443" - - -def test_cloud_memcache_host_with_port(): - client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="memcache.googleapis.com:8000" - ), - transport="grpc", - ) - assert client._transport._host == "memcache.googleapis.com:8000" - - -def test_cloud_memcache_grpc_transport_channel(): - channel = grpc.insecure_channel("http://localhost/") - - # Check that if channel is provided, mtls endpoint and client_cert_source - # won't be used. - callback = mock.MagicMock() - transport = transports.CloudMemcacheGrpcTransport( - host="squid.clam.whelk", - channel=channel, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=callback, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert not callback.called - - -@mock.patch("grpc.ssl_channel_credentials", autospec=True) -@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) -def test_cloud_memcache_grpc_transport_channel_mtls_with_client_cert_source( - grpc_create_channel, grpc_ssl_channel_cred -): - # Check that if channel is None, but api_mtls_endpoint and client_cert_source - # are provided, then a mTLS channel will be created. 
- mock_cred = mock.Mock() - - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - transport = transports.CloudMemcacheGrpcTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - ssl_credentials=mock_ssl_cred, - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ) - assert transport.grpc_channel == mock_grpc_channel - - -@pytest.mark.parametrize( - "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] -) -@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) -def test_cloud_memcache_grpc_transport_channel_mtls_with_adc( - grpc_create_channel, api_mtls_endpoint -): - # Check that if channel and client_cert_source are None, but api_mtls_endpoint - # is provided, then a mTLS channel will be created with SSL ADC. - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - # Mock google.auth.transport.grpc.SslCredentials class. 
- mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - mock_cred = mock.Mock() - transport = transports.CloudMemcacheGrpcTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint=api_mtls_endpoint, - client_cert_source=None, - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - ssl_credentials=mock_ssl_cred, - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_cloud_memcache_grpc_lro_client(): - client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc" - ) - transport = client._transport - - # Ensure that we have a api-core operations client. - assert isinstance(transport.operations_client, operations_v1.OperationsClient) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_instance_path(): - project = "squid" - location = "clam" - instance = "whelk" - - expected = "projects/{project}/locations/{location}/instances/{instance}".format( - project=project, location=location, instance=instance - ) - actual = CloudMemcacheClient.instance_path(project, location, instance) - assert expected == actual - - -def test_parse_instance_path(): - expected = {"project": "octopus", "location": "oyster", "instance": "nudibranch"} - path = CloudMemcacheClient.instance_path(**expected) - - # Check that the path construction is reversible. 
- actual = CloudMemcacheClient.parse_instance_path(path) - assert expected == actual From 2658465df5b7973c826d0b287f3674f146a1e8c2 Mon Sep 17 00:00:00 2001 From: Justin Beckwith Date: Fri, 29 Jan 2021 17:10:03 -0800 Subject: [PATCH 007/159] build: migrate to flakybot (#35) --- .kokoro/test-samples.sh | 8 ++++---- .kokoro/trampoline_v2.sh | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh index f2285be..87c7159 100755 --- a/.kokoro/test-samples.sh +++ b/.kokoro/test-samples.sh @@ -87,11 +87,11 @@ for file in samples/**/requirements.txt; do python3.6 -m nox -s "$RUN_TESTS_SESSION" EXIT=$? - # If this is a periodic build, send the test log to the Build Cop Bot. - # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop. + # If this is a periodic build, send the test log to the FlakyBot. + # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop - $KOKORO_GFILE_DIR/linux_amd64/buildcop + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot fi if [[ $EXIT -ne 0 ]]; then diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh index 719bcd5..4af6cdc 100755 --- a/.kokoro/trampoline_v2.sh +++ b/.kokoro/trampoline_v2.sh @@ -159,7 +159,7 @@ if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then "KOKORO_GITHUB_COMMIT" "KOKORO_GITHUB_PULL_REQUEST_NUMBER" "KOKORO_GITHUB_PULL_REQUEST_COMMIT" - # For Build Cop Bot + # For FlakyBot "KOKORO_GITHUB_COMMIT_URL" "KOKORO_GITHUB_PULL_REQUEST_URL" ) From 7945dafbbee1b21efc733e079044db77e880a10a Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 10 Feb 2021 12:36:11 -0500 Subject: [PATCH 008/159] feat: generate v1 (#37) --- .github/header-checker-lint.yml | 15 + .kokoro/build.sh | 16 +- .kokoro/docs/docs-presubmit.cfg | 11 + .trampolinerc | 1 + CONTRIBUTING.rst 
| 22 +- MANIFEST.in | 4 +- docs/_static/custom.css | 7 +- docs/index.rst | 21 +- docs/memcache_v1/cloud_memcache.rst | 11 + docs/memcache_v1/services.rst | 6 + docs/memcache_v1/types.rst | 7 + docs/memcache_v1beta2/cloud_memcache.rst | 11 + docs/memcache_v1beta2/services.rst | 6 +- docs/memcache_v1beta2/types.rst | 1 + google/cloud/memcache/__init__.py | 34 +- google/cloud/memcache_v1/__init__.py | 47 + google/cloud/memcache_v1/py.typed | 2 + google/cloud/memcache_v1/services/__init__.py | 16 + .../services/cloud_memcache/__init__.py | 24 + .../services/cloud_memcache/async_client.py | 857 ++++++ .../services/cloud_memcache/client.py | 1063 +++++++ .../services/cloud_memcache/pagers.py | 157 + .../cloud_memcache/transports/__init__.py | 35 + .../cloud_memcache/transports/base.py | 206 ++ .../cloud_memcache/transports/grpc.py | 477 +++ .../cloud_memcache/transports/grpc_asyncio.py | 496 ++++ google/cloud/memcache_v1/types/__init__.py | 46 + .../cloud/memcache_v1/types/cloud_memcache.py | 518 ++++ google/cloud/memcache_v1beta2/__init__.py | 2 + .../services/cloud_memcache/async_client.py | 241 +- .../services/cloud_memcache/client.py | 303 +- .../services/cloud_memcache/pagers.py | 27 +- .../cloud_memcache/transports/base.py | 14 + .../cloud_memcache/transports/grpc.py | 71 +- .../cloud_memcache/transports/grpc_asyncio.py | 73 +- .../cloud/memcache_v1beta2/types/__init__.py | 2 + .../memcache_v1beta2/types/cloud_memcache.py | 226 +- noxfile.py | 12 + scripts/fixup_memcache_v1_keywords.py | 185 ++ scripts/fixup_memcache_v1beta2_keywords.py | 1 + synth.py | 4 +- tests/unit/gapic/memcache_v1/__init__.py | 1 + .../gapic/memcache_v1/test_cloud_memcache.py | 2575 +++++++++++++++++ .../memcache_v1beta2/test_cloud_memcache.py | 434 ++- 44 files changed, 7891 insertions(+), 397 deletions(-) create mode 100644 .github/header-checker-lint.yml create mode 100644 docs/memcache_v1/cloud_memcache.rst create mode 100644 docs/memcache_v1/services.rst create mode 100644 
docs/memcache_v1/types.rst create mode 100644 docs/memcache_v1beta2/cloud_memcache.rst create mode 100644 google/cloud/memcache_v1/__init__.py create mode 100644 google/cloud/memcache_v1/py.typed create mode 100644 google/cloud/memcache_v1/services/__init__.py create mode 100644 google/cloud/memcache_v1/services/cloud_memcache/__init__.py create mode 100644 google/cloud/memcache_v1/services/cloud_memcache/async_client.py create mode 100644 google/cloud/memcache_v1/services/cloud_memcache/client.py create mode 100644 google/cloud/memcache_v1/services/cloud_memcache/pagers.py create mode 100644 google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py create mode 100644 google/cloud/memcache_v1/services/cloud_memcache/transports/base.py create mode 100644 google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py create mode 100644 google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py create mode 100644 google/cloud/memcache_v1/types/__init__.py create mode 100644 google/cloud/memcache_v1/types/cloud_memcache.py create mode 100644 scripts/fixup_memcache_v1_keywords.py create mode 100644 tests/unit/gapic/memcache_v1/__init__.py create mode 100644 tests/unit/gapic/memcache_v1/test_cloud_memcache.py diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml new file mode 100644 index 0000000..fc281c0 --- /dev/null +++ b/.github/header-checker-lint.yml @@ -0,0 +1,15 @@ +{"allowedCopyrightHolders": ["Google LLC"], + "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], + "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt"], + "sourceFileExtensions": [ + "ts", + "js", + "java", + "sh", + "Dockerfile", + "yaml", + "py", + "html", + "txt" + ] +} \ No newline at end of file diff --git a/.kokoro/build.sh b/.kokoro/build.sh index b96af36..7640bf3 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -15,7 +15,11 @@ set -eo pipefail -cd github/python-memcache +if [[ -z "${PROJECT_ROOT:-}" ]]; then + 
PROJECT_ROOT="github/python-memcache" +fi + +cd "${PROJECT_ROOT}" # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 @@ -30,16 +34,16 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") # Remove old nox -python3.6 -m pip uninstall --yes --quiet nox-automation +python3 -m pip uninstall --yes --quiet nox-automation # Install nox -python3.6 -m pip install --upgrade --quiet nox -python3.6 -m nox --version +python3 -m pip install --upgrade --quiet nox +python3 -m nox --version # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. if [[ -n "${NOX_SESSION:-}" ]]; then - python3.6 -m nox -s "${NOX_SESSION:-}" + python3 -m nox -s ${NOX_SESSION:-} else - python3.6 -m nox + python3 -m nox fi diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg index 1118107..b15caf9 100644 --- a/.kokoro/docs/docs-presubmit.cfg +++ b/.kokoro/docs/docs-presubmit.cfg @@ -15,3 +15,14 @@ env_vars: { key: "TRAMPOLINE_IMAGE_UPLOAD" value: "false" } + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-memcache/.kokoro/build.sh" +} + +# Only run this nox session. +env_vars: { + key: "NOX_SESSION" + value: "docs docfx" +} diff --git a/.trampolinerc b/.trampolinerc index 995ee29..383b6ec 100644 --- a/.trampolinerc +++ b/.trampolinerc @@ -24,6 +24,7 @@ required_envvars+=( pass_down_envvars+=( "STAGING_BUCKET" "V2_STAGING_BUCKET" + "NOX_SESSION" ) # Prevent unintentional override on the default image. diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 92e2f10..aacba05 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -70,9 +70,14 @@ We use `nox `__ to instrument our tests. - To test your changes, run unit tests with ``nox``:: $ nox -s unit-2.7 - $ nox -s unit-3.7 + $ nox -s unit-3.8 $ ... +- Args to pytest can be passed through the nox command separated by a `--`. 
For + example, to run a single test:: + + $ nox -s unit-3.8 -- -k + .. note:: The unit tests and system tests are described in the @@ -93,8 +98,12 @@ On Debian/Ubuntu:: ************ Coding Style ************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken -- PEP8 compliance, with exceptions defined in the linter configuration. +- PEP8 compliance is required, with exceptions defined in the linter configuration. If you have ``nox`` installed, you can test that you have not introduced any non-compliant code via:: @@ -133,13 +142,18 @@ Running System Tests - To run system tests, you can execute:: - $ nox -s system-3.7 + # Run all system tests + $ nox -s system-3.8 $ nox -s system-2.7 + # Run a single system test + $ nox -s system-3.8 -- -k + + .. note:: System tests are only configured to run under Python 2.7 and - Python 3.7. For expediency, we do not run them in older versions + Python 3.8. For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. You'll need to change some local diff --git a/MANIFEST.in b/MANIFEST.in index e9e29d1..e783f4c 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -16,10 +16,10 @@ # Generated by synthtool. DO NOT EDIT! 
include README.rst LICENSE -recursive-include google *.json *.proto +recursive-include google *.json *.proto py.typed recursive-include tests * global-exclude *.py[co] global-exclude __pycache__ # Exclude scripts for samples readmegen -prune scripts/readme-gen \ No newline at end of file +prune scripts/readme-gen diff --git a/docs/_static/custom.css b/docs/_static/custom.css index 0abaf22..bcd37bb 100644 --- a/docs/_static/custom.css +++ b/docs/_static/custom.css @@ -1,4 +1,9 @@ div#python2-eol { border-color: red; border-width: medium; -} \ No newline at end of file +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} diff --git a/docs/index.rst b/docs/index.rst index 221e3bf..56540c0 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -2,14 +2,27 @@ .. include:: multiprocessing.rst -API Reference -------------- +This package includes clients for multiple versions of the Memcache +API. By default, you will get ``v1``, the latest GA version. + +memcache_v1 API Reference +------------------------- + +.. toctree:: + :maxdepth: 2 + + Client (v1) + Types (v1) + + +memcache_v1beta2 API Reference +------------------------------ .. toctree:: :maxdepth: 2 - memcache_v1beta2/services - memcache_v1beta2/types + Client (v1beta2) + Types (v1beta2) Changelog diff --git a/docs/memcache_v1/cloud_memcache.rst b/docs/memcache_v1/cloud_memcache.rst new file mode 100644 index 0000000..0c21866 --- /dev/null +++ b/docs/memcache_v1/cloud_memcache.rst @@ -0,0 +1,11 @@ +CloudMemcache +------------------------------- + +.. automodule:: google.cloud.memcache_v1.services.cloud_memcache + :members: + :inherited-members: + + +.. 
automodule:: google.cloud.memcache_v1.services.cloud_memcache.pagers + :members: + :inherited-members: diff --git a/docs/memcache_v1/services.rst b/docs/memcache_v1/services.rst new file mode 100644 index 0000000..01fd6b8 --- /dev/null +++ b/docs/memcache_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Memcache v1 API +========================================= +.. toctree:: + :maxdepth: 2 + + cloud_memcache diff --git a/docs/memcache_v1/types.rst b/docs/memcache_v1/types.rst new file mode 100644 index 0000000..06bda21 --- /dev/null +++ b/docs/memcache_v1/types.rst @@ -0,0 +1,7 @@ +Types for Google Cloud Memcache v1 API +====================================== + +.. automodule:: google.cloud.memcache_v1.types + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/memcache_v1beta2/cloud_memcache.rst b/docs/memcache_v1beta2/cloud_memcache.rst new file mode 100644 index 0000000..b20fc3a --- /dev/null +++ b/docs/memcache_v1beta2/cloud_memcache.rst @@ -0,0 +1,11 @@ +CloudMemcache +------------------------------- + +.. automodule:: google.cloud.memcache_v1beta2.services.cloud_memcache + :members: + :inherited-members: + + +.. automodule:: google.cloud.memcache_v1beta2.services.cloud_memcache.pagers + :members: + :inherited-members: diff --git a/docs/memcache_v1beta2/services.rst b/docs/memcache_v1beta2/services.rst index 6b2845e..e5faef5 100644 --- a/docs/memcache_v1beta2/services.rst +++ b/docs/memcache_v1beta2/services.rst @@ -1,6 +1,6 @@ Services for Google Cloud Memcache v1beta2 API ============================================== +.. toctree:: + :maxdepth: 2 -.. automodule:: google.cloud.memcache_v1beta2.services.cloud_memcache - :members: - :inherited-members: + cloud_memcache diff --git a/docs/memcache_v1beta2/types.rst b/docs/memcache_v1beta2/types.rst index 1b47aa6..19e52f6 100644 --- a/docs/memcache_v1beta2/types.rst +++ b/docs/memcache_v1beta2/types.rst @@ -3,4 +3,5 @@ Types for Google Cloud Memcache v1beta2 API .. 
automodule:: google.cloud.memcache_v1beta2.types :members: + :undoc-members: :show-inheritance: diff --git a/google/cloud/memcache/__init__.py b/google/cloud/memcache/__init__.py index b66f063..4075bad 100644 --- a/google/cloud/memcache/__init__.py +++ b/google/cloud/memcache/__init__.py @@ -15,26 +15,22 @@ # limitations under the License. # -from google.cloud.memcache_v1beta2.services.cloud_memcache.async_client import ( +from google.cloud.memcache_v1.services.cloud_memcache.async_client import ( CloudMemcacheAsyncClient, ) -from google.cloud.memcache_v1beta2.services.cloud_memcache.client import ( - CloudMemcacheClient, -) -from google.cloud.memcache_v1beta2.types.cloud_memcache import ApplyParametersRequest -from google.cloud.memcache_v1beta2.types.cloud_memcache import CreateInstanceRequest -from google.cloud.memcache_v1beta2.types.cloud_memcache import DeleteInstanceRequest -from google.cloud.memcache_v1beta2.types.cloud_memcache import GetInstanceRequest -from google.cloud.memcache_v1beta2.types.cloud_memcache import Instance -from google.cloud.memcache_v1beta2.types.cloud_memcache import ListInstancesRequest -from google.cloud.memcache_v1beta2.types.cloud_memcache import ListInstancesResponse -from google.cloud.memcache_v1beta2.types.cloud_memcache import LocationMetadata -from google.cloud.memcache_v1beta2.types.cloud_memcache import MemcacheParameters -from google.cloud.memcache_v1beta2.types.cloud_memcache import MemcacheVersion -from google.cloud.memcache_v1beta2.types.cloud_memcache import OperationMetadata -from google.cloud.memcache_v1beta2.types.cloud_memcache import UpdateInstanceRequest -from google.cloud.memcache_v1beta2.types.cloud_memcache import UpdateParametersRequest -from google.cloud.memcache_v1beta2.types.cloud_memcache import ZoneMetadata +from google.cloud.memcache_v1.services.cloud_memcache.client import CloudMemcacheClient +from google.cloud.memcache_v1.types.cloud_memcache import ApplyParametersRequest +from 
google.cloud.memcache_v1.types.cloud_memcache import CreateInstanceRequest +from google.cloud.memcache_v1.types.cloud_memcache import DeleteInstanceRequest +from google.cloud.memcache_v1.types.cloud_memcache import GetInstanceRequest +from google.cloud.memcache_v1.types.cloud_memcache import Instance +from google.cloud.memcache_v1.types.cloud_memcache import ListInstancesRequest +from google.cloud.memcache_v1.types.cloud_memcache import ListInstancesResponse +from google.cloud.memcache_v1.types.cloud_memcache import MemcacheParameters +from google.cloud.memcache_v1.types.cloud_memcache import MemcacheVersion +from google.cloud.memcache_v1.types.cloud_memcache import OperationMetadata +from google.cloud.memcache_v1.types.cloud_memcache import UpdateInstanceRequest +from google.cloud.memcache_v1.types.cloud_memcache import UpdateParametersRequest __all__ = ( "ApplyParametersRequest", @@ -46,11 +42,9 @@ "Instance", "ListInstancesRequest", "ListInstancesResponse", - "LocationMetadata", "MemcacheParameters", "MemcacheVersion", "OperationMetadata", "UpdateInstanceRequest", "UpdateParametersRequest", - "ZoneMetadata", ) diff --git a/google/cloud/memcache_v1/__init__.py b/google/cloud/memcache_v1/__init__.py new file mode 100644 index 0000000..4d28d1b --- /dev/null +++ b/google/cloud/memcache_v1/__init__.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from .services.cloud_memcache import CloudMemcacheClient +from .types.cloud_memcache import ApplyParametersRequest +from .types.cloud_memcache import CreateInstanceRequest +from .types.cloud_memcache import DeleteInstanceRequest +from .types.cloud_memcache import GetInstanceRequest +from .types.cloud_memcache import Instance +from .types.cloud_memcache import ListInstancesRequest +from .types.cloud_memcache import ListInstancesResponse +from .types.cloud_memcache import MemcacheParameters +from .types.cloud_memcache import MemcacheVersion +from .types.cloud_memcache import OperationMetadata +from .types.cloud_memcache import UpdateInstanceRequest +from .types.cloud_memcache import UpdateParametersRequest + + +__all__ = ( + "ApplyParametersRequest", + "CreateInstanceRequest", + "DeleteInstanceRequest", + "GetInstanceRequest", + "Instance", + "ListInstancesRequest", + "ListInstancesResponse", + "MemcacheParameters", + "MemcacheVersion", + "OperationMetadata", + "UpdateInstanceRequest", + "UpdateParametersRequest", + "CloudMemcacheClient", +) diff --git a/google/cloud/memcache_v1/py.typed b/google/cloud/memcache_v1/py.typed new file mode 100644 index 0000000..7959cf4 --- /dev/null +++ b/google/cloud/memcache_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-memcache package uses inline types. diff --git a/google/cloud/memcache_v1/services/__init__.py b/google/cloud/memcache_v1/services/__init__.py new file mode 100644 index 0000000..42ffdf2 --- /dev/null +++ b/google/cloud/memcache_v1/services/__init__.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/google/cloud/memcache_v1/services/cloud_memcache/__init__.py b/google/cloud/memcache_v1/services/cloud_memcache/__init__.py new file mode 100644 index 0000000..8524cb4 --- /dev/null +++ b/google/cloud/memcache_v1/services/cloud_memcache/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .client import CloudMemcacheClient +from .async_client import CloudMemcacheAsyncClient + +__all__ = ( + "CloudMemcacheClient", + "CloudMemcacheAsyncClient", +) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py new file mode 100644 index 0000000..fd21c36 --- /dev/null +++ b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py @@ -0,0 +1,857 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.memcache_v1.services.cloud_memcache import pagers +from google.cloud.memcache_v1.types import cloud_memcache +from google.protobuf import empty_pb2 as empty # type: ignore +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + +from .transports.base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport +from .client import CloudMemcacheClient + + +class CloudMemcacheAsyncClient: + """Configures and manages Cloud Memorystore for Memcached instances. 
+ + The ``memcache.googleapis.com`` service implements the Google Cloud + Memorystore for Memcached API and defines the following resource + model for managing Memorystore Memcached (also called Memcached + below) instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Memcached instances, named: + ``/instances/*`` + - As such, Memcached instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be a GCP ``region``; for example: + + - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached`` + """ + + _client: CloudMemcacheClient + + DEFAULT_ENDPOINT = CloudMemcacheClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = CloudMemcacheClient.DEFAULT_MTLS_ENDPOINT + + instance_path = staticmethod(CloudMemcacheClient.instance_path) + parse_instance_path = staticmethod(CloudMemcacheClient.parse_instance_path) + + common_billing_account_path = staticmethod( + CloudMemcacheClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + CloudMemcacheClient.parse_common_billing_account_path + ) + + common_folder_path = staticmethod(CloudMemcacheClient.common_folder_path) + parse_common_folder_path = staticmethod( + CloudMemcacheClient.parse_common_folder_path + ) + + common_organization_path = staticmethod( + CloudMemcacheClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + CloudMemcacheClient.parse_common_organization_path + ) + + common_project_path = staticmethod(CloudMemcacheClient.common_project_path) + parse_common_project_path = staticmethod( + CloudMemcacheClient.parse_common_project_path + ) + + common_location_path = staticmethod(CloudMemcacheClient.common_location_path) + parse_common_location_path = staticmethod( + 
CloudMemcacheClient.parse_common_location_path + ) + + from_service_account_info = CloudMemcacheClient.from_service_account_info + from_service_account_file = CloudMemcacheClient.from_service_account_file + from_service_account_json = from_service_account_file + + @property + def transport(self) -> CloudMemcacheTransport: + """Return the transport used by the client instance. + + Returns: + CloudMemcacheTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(CloudMemcacheClient).get_transport_class, type(CloudMemcacheClient) + ) + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, CloudMemcacheTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the cloud memcache client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.CloudMemcacheTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + + self._client = CloudMemcacheClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_instances( + self, + request: cloud_memcache.ListInstancesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstancesAsyncPager: + r"""Lists Instances in a given location. + + Args: + request (:class:`google.cloud.memcache_v1.types.ListInstancesRequest`): + The request object. Request for + [ListInstances][google.cloud.memcache.v1.CloudMemcache.ListInstances]. + parent (:class:`str`): + Required. The resource name of the instance location + using the form: + ``projects/{project_id}/locations/{location_id}`` where + ``location_id`` refers to a GCP region + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.memcache_v1.services.cloud_memcache.pagers.ListInstancesAsyncPager: + Response for + [ListInstances][google.cloud.memcache.v1.CloudMemcache.ListInstances]. 
+ + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloud_memcache.ListInstancesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_instances, + default_timeout=1200.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInstancesAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_instance( + self, + request: cloud_memcache.GetInstanceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_memcache.Instance: + r"""Gets details of a single Instance. + + Args: + request (:class:`google.cloud.memcache_v1.types.GetInstanceRequest`): + The request object. 
Request for + [GetInstance][google.cloud.memcache.v1.CloudMemcache.GetInstance]. + name (:class:`str`): + Required. Memcached instance resource name in the + format: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.memcache_v1.types.Instance: + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloud_memcache.GetInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_instance, + default_timeout=1200.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def create_instance( + self, + request: cloud_memcache.CreateInstanceRequest = None, + *, + parent: str = None, + instance: cloud_memcache.Instance = None, + instance_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new Instance in a given location. + + Args: + request (:class:`google.cloud.memcache_v1.types.CreateInstanceRequest`): + The request object. Request for + [CreateInstance][google.cloud.memcache.v1.CloudMemcache.CreateInstance]. + parent (:class:`str`): + Required. The resource name of the instance location + using the form: + ``projects/{project_id}/locations/{location_id}`` where + ``location_id`` refers to a GCP region + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (:class:`google.cloud.memcache_v1.types.Instance`): + Required. A Memcached Instance + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_id (:class:`str`): + Required. The logical name of the Memcached instance in + the user project with the following restrictions: + + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-40 characters. + - Must end with a number or a letter. + - Must be unique within the user project / location. + + If any of the above are not met, will raise an invalid + argument error. + + This corresponds to the ``instance_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memcache_v1.types.Instance` + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, instance, instance_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloud_memcache.CreateInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if instance is not None: + request.instance = instance + if instance_id is not None: + request.instance_id = instance_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_instance, + default_timeout=1200.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def update_instance( + self, + request: cloud_memcache.UpdateInstanceRequest = None, + *, + instance: cloud_memcache.Instance = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an existing Instance in a given project and + location. + + Args: + request (:class:`google.cloud.memcache_v1.types.UpdateInstanceRequest`): + The request object. Request for + [UpdateInstance][google.cloud.memcache.v1.CloudMemcache.UpdateInstance]. + instance (:class:`google.cloud.memcache_v1.types.Instance`): + Required. A Memcached Instance. Only fields specified in + update_mask are updated. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. + + - ``displayName`` + + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memcache_v1.types.Instance` + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([instance, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloud_memcache.UpdateInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if instance is not None: + request.instance = instance + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_instance, + default_timeout=1200.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("instance.name", request.instance.name),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_parameters( + self, + request: cloud_memcache.UpdateParametersRequest = None, + *, + name: str = None, + update_mask: field_mask.FieldMask = None, + parameters: cloud_memcache.MemcacheParameters = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the defined Memcached Parameters for an + existing Instance. This method only stages the + parameters, it must be followed by ApplyParameters to + apply the parameters to nodes of the Memcached Instance. 
+ + Args: + request (:class:`google.cloud.memcache_v1.types.UpdateParametersRequest`): + The request object. Request for + [UpdateParameters][google.cloud.memcache.v1.CloudMemcache.UpdateParameters]. + name (:class:`str`): + Required. Resource name of the + Memcached instance for which the + parameters should be updated. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + parameters (:class:`google.cloud.memcache_v1.types.MemcacheParameters`): + The parameters to apply to the + instance. + + This corresponds to the ``parameters`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memcache_v1.types.Instance` + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, update_mask, parameters]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloud_memcache.UpdateParametersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ + if name is not None: + request.name = name + if update_mask is not None: + request.update_mask = update_mask + if parameters is not None: + request.parameters = parameters + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_parameters, + default_timeout=1200.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_instance( + self, + request: cloud_memcache.DeleteInstanceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single Instance. + + Args: + request (:class:`google.cloud.memcache_v1.types.DeleteInstanceRequest`): + The request object. Request for + [DeleteInstance][google.cloud.memcache.v1.CloudMemcache.DeleteInstance]. + name (:class:`str`): + Required. Memcached instance resource name in the + format: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + The JSON representation for Empty is empty JSON + object {}. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloud_memcache.DeleteInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_instance, + default_timeout=1200.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty.Empty, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. + return response + + async def apply_parameters( + self, + request: cloud_memcache.ApplyParametersRequest = None, + *, + name: str = None, + node_ids: Sequence[str] = None, + apply_all: bool = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""ApplyParameters will restart the set of specified + nodes in order to update them to the current set of + parameters for the Memcached Instance. + + Args: + request (:class:`google.cloud.memcache_v1.types.ApplyParametersRequest`): + The request object. Request for + [ApplyParameters][google.cloud.memcache.v1.CloudMemcache.ApplyParameters]. + name (:class:`str`): + Required. Resource name of the + Memcached instance for which parameter + group updates should be applied. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_ids (:class:`Sequence[str]`): + Nodes to which we should apply the + instance-level parameter group. + + This corresponds to the ``node_ids`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + apply_all (:class:`bool`): + Whether to apply instance-level + parameter group to all nodes. If set to + true, will explicitly restrict users + from specifying any nodes, and apply + parameter group updates to all nodes + within the instance. + + This corresponds to the ``apply_all`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memcache_v1.types.Instance` + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, node_ids, apply_all]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloud_memcache.ApplyParametersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if apply_all is not None: + request.apply_all = apply_all + + if node_ids: + request.node_ids.extend(node_ids) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.apply_parameters, + default_timeout=1200.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-memcache",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("CloudMemcacheAsyncClient",) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py new file mode 100644 index 0000000..0ae3f3e --- /dev/null +++ b/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -0,0 +1,1063 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.memcache_v1.services.cloud_memcache import pagers +from google.cloud.memcache_v1.types import cloud_memcache +from google.protobuf import empty_pb2 as empty # type: ignore +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + +from .transports.base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import CloudMemcacheGrpcTransport +from .transports.grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport + + +class CloudMemcacheClientMeta(type): + """Metaclass for the CloudMemcache client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[CloudMemcacheTransport]] + _transport_registry["grpc"] = CloudMemcacheGrpcTransport + _transport_registry["grpc_asyncio"] = CloudMemcacheGrpcAsyncIOTransport + + def get_transport_class(cls, label: str = None,) -> Type[CloudMemcacheTransport]: + """Return an appropriate transport class. 
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class CloudMemcacheClient(metaclass=CloudMemcacheClientMeta):
+    """Configures and manages Cloud Memorystore for Memcached instances.
+
+    The ``memcache.googleapis.com`` service implements the Google Cloud
+    Memorystore for Memcached API and defines the following resource
+    model for managing Memorystore Memcached (also called Memcached
+    below) instances:
+
+    - The service works with a collection of cloud projects, named:
+      ``/projects/*``
+    - Each project has a collection of available locations, named:
+      ``/locations/*``
+    - Each location has a collection of Memcached instances, named:
+      ``/instances/*``
+    - As such, Memcached instances are resources of the form:
+      ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+
+    Note that location_id must be a GCP ``region``; for example:
+
+    - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached``
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Convert api endpoint to mTLS endpoint.
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "memcache.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudMemcacheClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudMemcacheClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> CloudMemcacheTransport: + """Return the transport used by the client instance. + + Returns: + CloudMemcacheTransport: The transport used by the client instance. 
+        """
+        return self._transport
+
+    @staticmethod
+    def instance_path(project: str, location: str, instance: str,) -> str:
+        """Return a fully-qualified instance string."""
+        return "projects/{project}/locations/{location}/instances/{instance}".format(
+            project=project, location=location, instance=instance,
+        )
+
+    @staticmethod
+    def parse_instance_path(path: str) -> Dict[str, str]:
+        """Parse an instance path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/instances/(?P<instance>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str,) -> str:
+        """Return a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str,) -> str:
+        """Return a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder,)
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str,) -> str:
+        """Return a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization,)
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str,) -> str:
+        """Return a fully-qualified project string."""
+        return "projects/{project}".format(project=project,)
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str,) -> str:
+        """Return a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project, location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[credentials.Credentials] = None,
+        transport: Union[str, CloudMemcacheTransport, None] = None,
+        client_options: Optional[client_options_lib.ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the cloud memcache client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, CloudMemcacheTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        if isinstance(client_options, dict):
+            client_options = client_options_lib.from_dict(client_options)
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+
+        # Create SSL credentials for mutual TLS if needed.
+        use_client_cert = bool(
+            util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
+        )
+
+        client_cert_source_func = None
+        is_mtls = False
+        if use_client_cert:
+            if client_options.client_cert_source:
+                is_mtls = True
+                client_cert_source_func = client_options.client_cert_source
+            else:
+                is_mtls = mtls.has_default_client_cert_source()
+                client_cert_source_func = (
+                    mtls.default_client_cert_source() if is_mtls else None
+                )
+
+        # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT + ) + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, CloudMemcacheTransport): + # transport is a CloudMemcacheTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, " + "provide its scopes directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def list_instances( + self, + request: cloud_memcache.ListInstancesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstancesPager: + r"""Lists Instances in a given location. + + Args: + request (google.cloud.memcache_v1.types.ListInstancesRequest): + The request object. 
Request for + [ListInstances][google.cloud.memcache.v1.CloudMemcache.ListInstances]. + parent (str): + Required. The resource name of the instance location + using the form: + ``projects/{project_id}/locations/{location_id}`` where + ``location_id`` refers to a GCP region + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.memcache_v1.services.cloud_memcache.pagers.ListInstancesPager: + Response for + [ListInstances][google.cloud.memcache.v1.CloudMemcache.ListInstances]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_memcache.ListInstancesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_memcache.ListInstancesRequest): + request = cloud_memcache.ListInstancesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.list_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListInstancesPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def get_instance( + self, + request: cloud_memcache.GetInstanceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_memcache.Instance: + r"""Gets details of a single Instance. + + Args: + request (google.cloud.memcache_v1.types.GetInstanceRequest): + The request object. Request for + [GetInstance][google.cloud.memcache.v1.CloudMemcache.GetInstance]. + name (str): + Required. Memcached instance resource name in the + format: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.memcache_v1.types.Instance: + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_memcache.GetInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_memcache.GetInstanceRequest): + request = cloud_memcache.GetInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def create_instance( + self, + request: cloud_memcache.CreateInstanceRequest = None, + *, + parent: str = None, + instance: cloud_memcache.Instance = None, + instance_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new Instance in a given location. + + Args: + request (google.cloud.memcache_v1.types.CreateInstanceRequest): + The request object. Request for + [CreateInstance][google.cloud.memcache.v1.CloudMemcache.CreateInstance]. + parent (str): + Required. 
The resource name of the instance location + using the form: + ``projects/{project_id}/locations/{location_id}`` where + ``location_id`` refers to a GCP region + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (google.cloud.memcache_v1.types.Instance): + Required. A Memcached Instance + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_id (str): + Required. The logical name of the Memcached instance in + the user project with the following restrictions: + + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-40 characters. + - Must end with a number or a letter. + - Must be unique within the user project / location. + + If any of the above are not met, will raise an invalid + argument error. + + This corresponds to the ``instance_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memcache_v1.types.Instance` + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, instance, instance_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_memcache.CreateInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_memcache.CreateInstanceRequest): + request = cloud_memcache.CreateInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if instance is not None: + request.instance = instance + if instance_id is not None: + request.instance_id = instance_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_instance( + self, + request: cloud_memcache.UpdateInstanceRequest = None, + *, + instance: cloud_memcache.Instance = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates an existing Instance in a given project and + location. + + Args: + request (google.cloud.memcache_v1.types.UpdateInstanceRequest): + The request object. Request for + [UpdateInstance][google.cloud.memcache.v1.CloudMemcache.UpdateInstance]. 
+ instance (google.cloud.memcache_v1.types.Instance): + Required. A Memcached Instance. Only fields specified in + update_mask are updated. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + + - ``displayName`` + + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memcache_v1.types.Instance` + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_memcache.UpdateInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_memcache.UpdateInstanceRequest): + request = cloud_memcache.UpdateInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ + if instance is not None: + request.instance = instance + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("instance.name", request.instance.name),) + ), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_parameters( + self, + request: cloud_memcache.UpdateParametersRequest = None, + *, + name: str = None, + update_mask: field_mask.FieldMask = None, + parameters: cloud_memcache.MemcacheParameters = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the defined Memcached Parameters for an + existing Instance. This method only stages the + parameters, it must be followed by ApplyParameters to + apply the parameters to nodes of the Memcached Instance. + + Args: + request (google.cloud.memcache_v1.types.UpdateParametersRequest): + The request object. Request for + [UpdateParameters][google.cloud.memcache.v1.CloudMemcache.UpdateParameters]. + name (str): + Required. Resource name of the + Memcached instance for which the + parameters should be updated. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. 
+ This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + parameters (google.cloud.memcache_v1.types.MemcacheParameters): + The parameters to apply to the + instance. + + This corresponds to the ``parameters`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memcache_v1.types.Instance` + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, update_mask, parameters]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_memcache.UpdateParametersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_memcache.UpdateParametersRequest): + request = cloud_memcache.UpdateParametersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if update_mask is not None: + request.update_mask = update_mask + if parameters is not None: + request.parameters = parameters + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.update_parameters] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_instance( + self, + request: cloud_memcache.DeleteInstanceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single Instance. + + Args: + request (google.cloud.memcache_v1.types.DeleteInstanceRequest): + The request object. Request for + [DeleteInstance][google.cloud.memcache.v1.CloudMemcache.DeleteInstance]. + name (str): + Required. Memcached instance resource name in the + format: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. 
A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + The JSON representation for Empty is empty JSON + object {}. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_memcache.DeleteInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_memcache.DeleteInstanceRequest): + request = cloud_memcache.DeleteInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty.Empty, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def apply_parameters( + self, + request: cloud_memcache.ApplyParametersRequest = None, + *, + name: str = None, + node_ids: Sequence[str] = None, + apply_all: bool = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""ApplyParameters will restart the set of specified + nodes in order to update them to the current set of + parameters for the Memcached Instance. + + Args: + request (google.cloud.memcache_v1.types.ApplyParametersRequest): + The request object. Request for + [ApplyParameters][google.cloud.memcache.v1.CloudMemcache.ApplyParameters]. + name (str): + Required. Resource name of the + Memcached instance for which parameter + group updates should be applied. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_ids (Sequence[str]): + Nodes to which we should apply the + instance-level parameter group. + + This corresponds to the ``node_ids`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + apply_all (bool): + Whether to apply instance-level + parameter group to all nodes. If set to + true, will explicitly restrict users + from specifying any nodes, and apply + parameter group updates to all nodes + within the instance. + + This corresponds to the ``apply_all`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.memcache_v1.types.Instance` + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, node_ids, apply_all]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_memcache.ApplyParametersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_memcache.ApplyParametersRequest): + request = cloud_memcache.ApplyParametersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if node_ids is not None: + request.node_ids = node_ids + if apply_all is not None: + request.apply_all = apply_all + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.apply_parameters] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-memcache",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("CloudMemcacheClient",) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/pagers.py b/google/cloud/memcache_v1/services/cloud_memcache/pagers.py new file mode 100644 index 0000000..7a1324e --- /dev/null +++ b/google/cloud/memcache_v1/services/cloud_memcache/pagers.py @@ -0,0 +1,157 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Iterable, + Sequence, + Tuple, + Optional, +) + +from google.cloud.memcache_v1.types import cloud_memcache + + +class ListInstancesPager: + """A pager for iterating through ``list_instances`` requests. + + This class thinly wraps an initial + :class:`google.cloud.memcache_v1.types.ListInstancesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``instances`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInstances`` requests and continue to iterate + through the ``instances`` field on the + corresponding responses. + + All the usual :class:`google.cloud.memcache_v1.types.ListInstancesResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., cloud_memcache.ListInstancesResponse], + request: cloud_memcache.ListInstancesRequest, + response: cloud_memcache.ListInstancesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.memcache_v1.types.ListInstancesRequest): + The initial request object. + response (google.cloud.memcache_v1.types.ListInstancesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloud_memcache.ListInstancesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[cloud_memcache.ListInstancesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[cloud_memcache.Instance]: + for page in self.pages: + yield from page.instances + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListInstancesAsyncPager: + """A pager for iterating through ``list_instances`` requests. + + This class thinly wraps an initial + :class:`google.cloud.memcache_v1.types.ListInstancesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``instances`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListInstances`` requests and continue to iterate + through the ``instances`` field on the + corresponding responses. + + All the usual :class:`google.cloud.memcache_v1.types.ListInstancesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[cloud_memcache.ListInstancesResponse]], + request: cloud_memcache.ListInstancesRequest, + response: cloud_memcache.ListInstancesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.memcache_v1.types.ListInstancesRequest): + The initial request object. + response (google.cloud.memcache_v1.types.ListInstancesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = cloud_memcache.ListInstancesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[cloud_memcache.ListInstancesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[cloud_memcache.Instance]: + async def async_generator(): + async for page in self.pages: + for response in page.instances: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py new file mode 100644 index 0000000..38122c6 --- /dev/null +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from collections import OrderedDict +from typing import Dict, Type + +from .base import CloudMemcacheTransport +from .grpc import CloudMemcacheGrpcTransport +from .grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[CloudMemcacheTransport]] +_transport_registry["grpc"] = CloudMemcacheGrpcTransport +_transport_registry["grpc_asyncio"] = CloudMemcacheGrpcAsyncIOTransport + +__all__ = ( + "CloudMemcacheTransport", + "CloudMemcacheGrpcTransport", + "CloudMemcacheGrpcAsyncIOTransport", +) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py new file mode 100644 index 0000000..a6a2b3c --- /dev/null +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py @@ -0,0 +1,206 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+import abc
+import typing
+import pkg_resources
+
+from google import auth  # type: ignore
+from google.api_core import exceptions  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import retry as retries  # type: ignore
+from google.api_core import operations_v1  # type: ignore
+from google.auth import credentials  # type: ignore
+
+from google.cloud.memcache_v1.types import cloud_memcache
+from google.longrunning import operations_pb2 as operations  # type: ignore
+
+
+try:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+        gapic_version=pkg_resources.get_distribution("google-cloud-memcache",).version,
+    )
+except pkg_resources.DistributionNotFound:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+class CloudMemcacheTransport(abc.ABC):
+    """Abstract transport class for CloudMemcache."""
+
+    AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
+
+    def __init__(
+        self,
+        *,
+        host: str = "memcache.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: typing.Optional[str] = None,
+        scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
+        quota_project_id: typing.Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        **kwargs,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]): The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = auth.load_credentials_from_file( + credentials_file, scopes=scopes, quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = auth.default( + scopes=scopes, quota_project_id=quota_project_id + ) + + # Save the credentials. + self._credentials = credentials + + # Lifted into its own function so it can be stubbed out during tests. + self._prep_wrapped_messages(client_info) + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.list_instances: gapic_v1.method.wrap_method( + self.list_instances, default_timeout=1200.0, client_info=client_info, + ), + self.get_instance: gapic_v1.method.wrap_method( + self.get_instance, default_timeout=1200.0, client_info=client_info, + ), + self.create_instance: gapic_v1.method.wrap_method( + self.create_instance, default_timeout=1200.0, client_info=client_info, + ), + self.update_instance: gapic_v1.method.wrap_method( + self.update_instance, default_timeout=1200.0, client_info=client_info, + ), + self.update_parameters: gapic_v1.method.wrap_method( + self.update_parameters, default_timeout=1200.0, client_info=client_info, + ), + self.delete_instance: gapic_v1.method.wrap_method( + self.delete_instance, default_timeout=1200.0, client_info=client_info, + ), + self.apply_parameters: gapic_v1.method.wrap_method( + self.apply_parameters, default_timeout=1200.0, client_info=client_info, + ), + } + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_instances( + self, + ) -> typing.Callable[ + [cloud_memcache.ListInstancesRequest], + typing.Union[ + cloud_memcache.ListInstancesResponse, + typing.Awaitable[cloud_memcache.ListInstancesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_instance( + self, + ) -> typing.Callable[ + [cloud_memcache.GetInstanceRequest], + typing.Union[ + cloud_memcache.Instance, typing.Awaitable[cloud_memcache.Instance] + ], + ]: + raise NotImplementedError() + + @property + def create_instance( + self, + ) -> typing.Callable[ + [cloud_memcache.CreateInstanceRequest], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + ]: + raise NotImplementedError() + + @property + def update_instance( + self, + ) -> typing.Callable[ + [cloud_memcache.UpdateInstanceRequest], + typing.Union[operations.Operation, 
typing.Awaitable[operations.Operation]], + ]: + raise NotImplementedError() + + @property + def update_parameters( + self, + ) -> typing.Callable[ + [cloud_memcache.UpdateParametersRequest], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_instance( + self, + ) -> typing.Callable[ + [cloud_memcache.DeleteInstanceRequest], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + ]: + raise NotImplementedError() + + @property + def apply_parameters( + self, + ) -> typing.Callable[ + [cloud_memcache.ApplyParametersRequest], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + ]: + raise NotImplementedError() + + +__all__ = ("CloudMemcacheTransport",) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py new file mode 100644 index 0000000..bc03a88 --- /dev/null +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py @@ -0,0 +1,477 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import operations_v1 # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.memcache_v1.types import cloud_memcache +from google.longrunning import operations_pb2 as operations # type: ignore + +from .base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO + + +class CloudMemcacheGrpcTransport(CloudMemcacheTransport): + """gRPC backend transport for CloudMemcache. + + Configures and manages Cloud Memorystore for Memcached instances. + + The ``memcache.googleapis.com`` service implements the Google Cloud + Memorystore for Memcached API and defines the following resource + model for managing Memorystore Memcached (also called Memcached + below) instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Memcached instances, named: + ``/instances/*`` + - As such, Memcached instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be a GCP ``region``; for example: + + - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached`` + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "memcache.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. 
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._ssl_channel_credentials = ssl_channel_credentials + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + elif api_mtls_endpoint: + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + self._ssl_channel_credentials = ssl_credentials + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=self._ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._stubs = {} # type: Dict[str, Callable] + self._operations_client = None + + # Run the base constructor. 
+ super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + @classmethod + def create_channel( + cls, + host: str = "memcache.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + address (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. 
+ """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Sanity check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def list_instances( + self, + ) -> Callable[ + [cloud_memcache.ListInstancesRequest], cloud_memcache.ListInstancesResponse + ]: + r"""Return a callable for the list instances method over gRPC. + + Lists Instances in a given location. + + Returns: + Callable[[~.ListInstancesRequest], + ~.ListInstancesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_instances" not in self._stubs: + self._stubs["list_instances"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/ListInstances", + request_serializer=cloud_memcache.ListInstancesRequest.serialize, + response_deserializer=cloud_memcache.ListInstancesResponse.deserialize, + ) + return self._stubs["list_instances"] + + @property + def get_instance( + self, + ) -> Callable[[cloud_memcache.GetInstanceRequest], cloud_memcache.Instance]: + r"""Return a callable for the get instance method over gRPC. + + Gets details of a single Instance. + + Returns: + Callable[[~.GetInstanceRequest], + ~.Instance]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_instance" not in self._stubs: + self._stubs["get_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/GetInstance", + request_serializer=cloud_memcache.GetInstanceRequest.serialize, + response_deserializer=cloud_memcache.Instance.deserialize, + ) + return self._stubs["get_instance"] + + @property + def create_instance( + self, + ) -> Callable[[cloud_memcache.CreateInstanceRequest], operations.Operation]: + r"""Return a callable for the create instance method over gRPC. + + Creates a new Instance in a given location. + + Returns: + Callable[[~.CreateInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_instance" not in self._stubs: + self._stubs["create_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/CreateInstance", + request_serializer=cloud_memcache.CreateInstanceRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["create_instance"] + + @property + def update_instance( + self, + ) -> Callable[[cloud_memcache.UpdateInstanceRequest], operations.Operation]: + r"""Return a callable for the update instance method over gRPC. + + Updates an existing Instance in a given project and + location. + + Returns: + Callable[[~.UpdateInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_instance" not in self._stubs: + self._stubs["update_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/UpdateInstance", + request_serializer=cloud_memcache.UpdateInstanceRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["update_instance"] + + @property + def update_parameters( + self, + ) -> Callable[[cloud_memcache.UpdateParametersRequest], operations.Operation]: + r"""Return a callable for the update parameters method over gRPC. + + Updates the defined Memcached Parameters for an + existing Instance. This method only stages the + parameters, it must be followed by ApplyParameters to + apply the parameters to nodes of the Memcached Instance. + + Returns: + Callable[[~.UpdateParametersRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_parameters" not in self._stubs: + self._stubs["update_parameters"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/UpdateParameters", + request_serializer=cloud_memcache.UpdateParametersRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["update_parameters"] + + @property + def delete_instance( + self, + ) -> Callable[[cloud_memcache.DeleteInstanceRequest], operations.Operation]: + r"""Return a callable for the delete instance method over gRPC. + + Deletes a single Instance. + + Returns: + Callable[[~.DeleteInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_instance" not in self._stubs: + self._stubs["delete_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/DeleteInstance", + request_serializer=cloud_memcache.DeleteInstanceRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["delete_instance"] + + @property + def apply_parameters( + self, + ) -> Callable[[cloud_memcache.ApplyParametersRequest], operations.Operation]: + r"""Return a callable for the apply parameters method over gRPC. + + ApplyParameters will restart the set of specified + nodes in order to update them to the current set of + parameters for the Memcached Instance. + + Returns: + Callable[[~.ApplyParametersRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "apply_parameters" not in self._stubs: + self._stubs["apply_parameters"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/ApplyParameters", + request_serializer=cloud_memcache.ApplyParametersRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["apply_parameters"] + + +__all__ = ("CloudMemcacheGrpcTransport",) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py new file mode 100644 index 0000000..c11f7c5 --- /dev/null +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py @@ -0,0 +1,496 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import operations_v1 # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.memcache_v1.types import cloud_memcache +from google.longrunning import operations_pb2 as operations # type: ignore + +from .base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO +from .grpc import CloudMemcacheGrpcTransport + + +class CloudMemcacheGrpcAsyncIOTransport(CloudMemcacheTransport): + """gRPC AsyncIO backend transport for CloudMemcache. + + Configures and manages Cloud Memorystore for Memcached instances. + + The ``memcache.googleapis.com`` service implements the Google Cloud + Memorystore for Memcached API and defines the following resource + model for managing Memorystore Memcached (also called Memcached + below) instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Memcached instances, named: + ``/instances/*`` + - As such, Memcached instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be a GCP ``region``; for example: + + - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached`` + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "memcache.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + address (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "memcache.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. 
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ self._ssl_channel_credentials = ssl_channel_credentials
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+ if channel:
+ # Sanity check: Ensure that channel and credentials are not both
+ # provided.
+ credentials = False
+
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel + self._ssl_channel_credentials = None + elif api_mtls_endpoint: + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + self._ssl_channel_credentials = ssl_credentials + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=self._ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Run the base constructor. 
+ super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + self._stubs = {} + self._operations_client = None + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Sanity check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_instances( + self, + ) -> Callable[ + [cloud_memcache.ListInstancesRequest], + Awaitable[cloud_memcache.ListInstancesResponse], + ]: + r"""Return a callable for the list instances method over gRPC. + + Lists Instances in a given location. + + Returns: + Callable[[~.ListInstancesRequest], + Awaitable[~.ListInstancesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_instances" not in self._stubs: + self._stubs["list_instances"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/ListInstances", + request_serializer=cloud_memcache.ListInstancesRequest.serialize, + response_deserializer=cloud_memcache.ListInstancesResponse.deserialize, + ) + return self._stubs["list_instances"] + + @property + def get_instance( + self, + ) -> Callable[ + [cloud_memcache.GetInstanceRequest], Awaitable[cloud_memcache.Instance] + ]: + r"""Return a callable for the get instance method over gRPC. + + Gets details of a single Instance. + + Returns: + Callable[[~.GetInstanceRequest], + Awaitable[~.Instance]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_instance" not in self._stubs: + self._stubs["get_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/GetInstance", + request_serializer=cloud_memcache.GetInstanceRequest.serialize, + response_deserializer=cloud_memcache.Instance.deserialize, + ) + return self._stubs["get_instance"] + + @property + def create_instance( + self, + ) -> Callable[ + [cloud_memcache.CreateInstanceRequest], Awaitable[operations.Operation] + ]: + r"""Return a callable for the create instance method over gRPC. + + Creates a new Instance in a given location. + + Returns: + Callable[[~.CreateInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_instance" not in self._stubs: + self._stubs["create_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/CreateInstance", + request_serializer=cloud_memcache.CreateInstanceRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["create_instance"] + + @property + def update_instance( + self, + ) -> Callable[ + [cloud_memcache.UpdateInstanceRequest], Awaitable[operations.Operation] + ]: + r"""Return a callable for the update instance method over gRPC. + + Updates an existing Instance in a given project and + location. + + Returns: + Callable[[~.UpdateInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_instance" not in self._stubs: + self._stubs["update_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/UpdateInstance", + request_serializer=cloud_memcache.UpdateInstanceRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["update_instance"] + + @property + def update_parameters( + self, + ) -> Callable[ + [cloud_memcache.UpdateParametersRequest], Awaitable[operations.Operation] + ]: + r"""Return a callable for the update parameters method over gRPC. + + Updates the defined Memcached Parameters for an + existing Instance. This method only stages the + parameters, it must be followed by ApplyParameters to + apply the parameters to nodes of the Memcached Instance. + + Returns: + Callable[[~.UpdateParametersRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_parameters" not in self._stubs: + self._stubs["update_parameters"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/UpdateParameters", + request_serializer=cloud_memcache.UpdateParametersRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["update_parameters"] + + @property + def delete_instance( + self, + ) -> Callable[ + [cloud_memcache.DeleteInstanceRequest], Awaitable[operations.Operation] + ]: + r"""Return a callable for the delete instance method over gRPC. + + Deletes a single Instance. + + Returns: + Callable[[~.DeleteInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_instance" not in self._stubs: + self._stubs["delete_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/DeleteInstance", + request_serializer=cloud_memcache.DeleteInstanceRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["delete_instance"] + + @property + def apply_parameters( + self, + ) -> Callable[ + [cloud_memcache.ApplyParametersRequest], Awaitable[operations.Operation] + ]: + r"""Return a callable for the apply parameters method over gRPC. + + ApplyParameters will restart the set of specified + nodes in order to update them to the current set of + parameters for the Memcached Instance. + + Returns: + Callable[[~.ApplyParametersRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "apply_parameters" not in self._stubs: + self._stubs["apply_parameters"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/ApplyParameters", + request_serializer=cloud_memcache.ApplyParametersRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["apply_parameters"] + + +__all__ = ("CloudMemcacheGrpcAsyncIOTransport",) diff --git a/google/cloud/memcache_v1/types/__init__.py b/google/cloud/memcache_v1/types/__init__.py new file mode 100644 index 0000000..5fe285a --- /dev/null +++ b/google/cloud/memcache_v1/types/__init__.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from .cloud_memcache import ( + Instance, + ListInstancesRequest, + ListInstancesResponse, + GetInstanceRequest, + CreateInstanceRequest, + UpdateInstanceRequest, + DeleteInstanceRequest, + ApplyParametersRequest, + UpdateParametersRequest, + MemcacheParameters, + OperationMetadata, + MemcacheVersion, +) + +__all__ = ( + "Instance", + "ListInstancesRequest", + "ListInstancesResponse", + "GetInstanceRequest", + "CreateInstanceRequest", + "UpdateInstanceRequest", + "DeleteInstanceRequest", + "ApplyParametersRequest", + "UpdateParametersRequest", + "MemcacheParameters", + "OperationMetadata", + "MemcacheVersion", +) diff --git a/google/cloud/memcache_v1/types/cloud_memcache.py b/google/cloud/memcache_v1/types/cloud_memcache.py new file mode 100644 index 0000000..066bc9d --- /dev/null +++ b/google/cloud/memcache_v1/types/cloud_memcache.py @@ -0,0 +1,518 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import proto # type: ignore + + +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.memcache.v1", + manifest={ + "MemcacheVersion", + "Instance", + "ListInstancesRequest", + "ListInstancesResponse", + "GetInstanceRequest", + "CreateInstanceRequest", + "UpdateInstanceRequest", + "DeleteInstanceRequest", + "ApplyParametersRequest", + "UpdateParametersRequest", + "MemcacheParameters", + "OperationMetadata", + }, +) + + +class MemcacheVersion(proto.Enum): + r"""Memcached versions supported by our service.""" + MEMCACHE_VERSION_UNSPECIFIED = 0 + MEMCACHE_1_5 = 1 + + +class Instance(proto.Message): + r""" + + Attributes: + name (str): + Required. Unique name of the resource in this scope + including project and location using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note: Memcached instances are managed and addressed at + regional level so location_id here refers to a GCP region; + however, users may choose which zones Memcached nodes within + an instances should be provisioned in. Refer to [zones] + field for more details. + display_name (str): + User provided name for the instance only used + for display purposes. Cannot be more than 80 + characters. + labels (Sequence[google.cloud.memcache_v1.types.Instance.LabelsEntry]): + Resource labels to represent user-provided + metadata. Refer to cloud documentation on labels + for more details. + https://cloud.google.com/compute/docs/labeling- + resources + authorized_network (str): + The full name of the Google Compute Engine + `network `__ + to which the instance is connected. If left unspecified, the + ``default`` network will be used. + zones (Sequence[str]): + Zones where Memcached nodes should be + provisioned in. Memcached nodes will be equally + distributed across these zones. 
If not provided, + the service will by default create nodes in all + zones in the region for the instance. + node_count (int): + Required. Number of nodes in the Memcached + instance. + node_config (google.cloud.memcache_v1.types.Instance.NodeConfig): + Required. Configuration for Memcached nodes. + memcache_version (google.cloud.memcache_v1.types.MemcacheVersion): + The major version of Memcached software. If not provided, + latest supported version will be used. Currently the latest + supported major version is MEMCACHE_1_5. The minor version + will be automatically determined by our system based on the + latest supported minor version. + parameters (google.cloud.memcache_v1.types.MemcacheParameters): + Optional: User defined parameters to apply to + the memcached process on each node. + memcache_nodes (Sequence[google.cloud.memcache_v1.types.Instance.Node]): + Output only. List of Memcached nodes. Refer to [Node] + message for more details. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the instance was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the instance was + updated. + state (google.cloud.memcache_v1.types.Instance.State): + Output only. The state of this Memcached + instance. + memcache_full_version (str): + Output only. The full version of memcached + server running on this instance. System + automatically determines the full memcached + version for an instance based on the input + MemcacheVersion. + The full version format will be + "memcached-1.5.16". + instance_messages (Sequence[google.cloud.memcache_v1.types.Instance.InstanceMessage]): + List of messages that describe current + statuses of memcached instance. + discovery_endpoint (str): + Output only. 
Endpoint for Discovery API + """ + + class State(proto.Enum): + r"""Different states of a Memcached instance.""" + STATE_UNSPECIFIED = 0 + CREATING = 1 + READY = 2 + DELETING = 4 + PERFORMING_MAINTENANCE = 5 + + class NodeConfig(proto.Message): + r"""Configuration for a Memcached Node. + + Attributes: + cpu_count (int): + Required. Number of cpus per Memcached node. + memory_size_mb (int): + Required. Memory size in MiB for each + Memcached node. + """ + + cpu_count = proto.Field(proto.INT32, number=1) + + memory_size_mb = proto.Field(proto.INT32, number=2) + + class Node(proto.Message): + r""" + + Attributes: + node_id (str): + Output only. Identifier of the Memcached + node. The node id does not include project or + location like the Memcached instance name. + zone (str): + Output only. Location (GCP Zone) for the + Memcached node. + state (google.cloud.memcache_v1.types.Instance.Node.State): + Output only. Current state of the Memcached + node. + host (str): + Output only. Hostname or IP address of the + Memcached node used by the clients to connect to + the Memcached server on this node. + port (int): + Output only. The port number of the Memcached + server on this node. + parameters (google.cloud.memcache_v1.types.MemcacheParameters): + User defined parameters currently applied to + the node. 
+        """
+
+        class State(proto.Enum):
+            r"""Different states of a Memcached node."""
+            STATE_UNSPECIFIED = 0
+            CREATING = 1
+            READY = 2
+            DELETING = 3
+            UPDATING = 4
+
+        node_id = proto.Field(proto.STRING, number=1)
+
+        zone = proto.Field(proto.STRING, number=2)
+
+        state = proto.Field(proto.ENUM, number=3, enum="Instance.Node.State",)
+
+        host = proto.Field(proto.STRING, number=4)
+
+        port = proto.Field(proto.INT32, number=5)
+
+        parameters = proto.Field(proto.MESSAGE, number=6, message="MemcacheParameters",)
+
+    class InstanceMessage(proto.Message):
+        r"""
+
+        Attributes:
+            code (google.cloud.memcache_v1.types.Instance.InstanceMessage.Code):
+                A code that corresponds to one type of user-
+                facing message.
+            message (str):
+                Message on memcached instance which will be
+                exposed to users.
+        """
+
+        class Code(proto.Enum):
+            r""""""
+            CODE_UNSPECIFIED = 0
+            ZONE_DISTRIBUTION_UNBALANCED = 1
+
+        code = proto.Field(proto.ENUM, number=1, enum="Instance.InstanceMessage.Code",)
+
+        message = proto.Field(proto.STRING, number=2)
+
+    name = proto.Field(proto.STRING, number=1)
+
+    display_name = proto.Field(proto.STRING, number=2)
+
+    labels = proto.MapField(proto.STRING, proto.STRING, number=3)
+
+    authorized_network = proto.Field(proto.STRING, number=4)
+
+    zones = proto.RepeatedField(proto.STRING, number=5)
+
+    node_count = proto.Field(proto.INT32, number=6)
+
+    node_config = proto.Field(proto.MESSAGE, number=7, message=NodeConfig,)
+
+    memcache_version = proto.Field(proto.ENUM, number=9, enum="MemcacheVersion",)
+
+    parameters = proto.Field(proto.MESSAGE, number=11, message="MemcacheParameters",)
+
+    memcache_nodes = proto.RepeatedField(proto.MESSAGE, number=12, message=Node,)
+
+    create_time = proto.Field(proto.MESSAGE, number=13, message=timestamp.Timestamp,)
+
+    update_time = proto.Field(proto.MESSAGE, number=14, message=timestamp.Timestamp,)
+
+    state = proto.Field(proto.ENUM, number=15, enum=State,)
+
+    memcache_full_version = proto.Field(proto.STRING, number=18)
+
+    
instance_messages = proto.RepeatedField( + proto.MESSAGE, number=19, message=InstanceMessage, + ) + + discovery_endpoint = proto.Field(proto.STRING, number=20) + + +class ListInstancesRequest(proto.Message): + r"""Request for + [ListInstances][google.cloud.memcache.v1.CloudMemcache.ListInstances]. + + Attributes: + parent (str): + Required. The resource name of the instance location using + the form: ``projects/{project_id}/locations/{location_id}`` + where ``location_id`` refers to a GCP region + page_size (int): + The maximum number of items to return. + + If not specified, a default value of 1000 will be used by + the service. Regardless of the page_size value, the response + may include a partial list and a caller should only rely on + response's + [next_page_token][CloudMemcache.ListInstancesResponse.next_page_token] + to determine if there are more instances left to be queried. + page_token (str): + The next_page_token value returned from a previous List + request, if any. + filter (str): + List filter. For example, exclude all + Memcached instances with name as my-instance by + specifying "name != my-instance". + order_by (str): + Sort results. Supported values are "name", + "name desc" or "" (unsorted). + """ + + parent = proto.Field(proto.STRING, number=1) + + page_size = proto.Field(proto.INT32, number=2) + + page_token = proto.Field(proto.STRING, number=3) + + filter = proto.Field(proto.STRING, number=4) + + order_by = proto.Field(proto.STRING, number=5) + + +class ListInstancesResponse(proto.Message): + r"""Response for + [ListInstances][google.cloud.memcache.v1.CloudMemcache.ListInstances]. + + Attributes: + instances (Sequence[google.cloud.memcache_v1.types.Instance]): + A list of Memcached instances in the project in the + specified location, or across all locations. + + If the ``location_id`` in the parent field of the request is + "-", all regions available to the project are queried, and + the results aggregated. 
+ next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable (Sequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + instances = proto.RepeatedField(proto.MESSAGE, number=1, message="Instance",) + + next_page_token = proto.Field(proto.STRING, number=2) + + unreachable = proto.RepeatedField(proto.STRING, number=3) + + +class GetInstanceRequest(proto.Message): + r"""Request for + [GetInstance][google.cloud.memcache.v1.CloudMemcache.GetInstance]. + + Attributes: + name (str): + Required. Memcached instance resource name in the format: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region + """ + + name = proto.Field(proto.STRING, number=1) + + +class CreateInstanceRequest(proto.Message): + r"""Request for + [CreateInstance][google.cloud.memcache.v1.CloudMemcache.CreateInstance]. + + Attributes: + parent (str): + Required. The resource name of the instance location using + the form: ``projects/{project_id}/locations/{location_id}`` + where ``location_id`` refers to a GCP region + instance_id (str): + Required. The logical name of the Memcached instance in the + user project with the following restrictions: + + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-40 characters. + - Must end with a number or a letter. + - Must be unique within the user project / location. + + If any of the above are not met, will raise an invalid + argument error. + instance (google.cloud.memcache_v1.types.Instance): + Required. 
A Memcached Instance
+    """
+
+    parent = proto.Field(proto.STRING, number=1)
+
+    instance_id = proto.Field(proto.STRING, number=2)
+
+    instance = proto.Field(proto.MESSAGE, number=3, message="Instance",)
+
+
+class UpdateInstanceRequest(proto.Message):
+    r"""Request for
+    [UpdateInstance][google.cloud.memcache.v1.CloudMemcache.UpdateInstance].
+
+    Attributes:
+        update_mask (google.protobuf.field_mask_pb2.FieldMask):
+            Required. Mask of fields to update.
+
+            -  ``displayName``
+        instance (google.cloud.memcache_v1.types.Instance):
+            Required. A Memcached Instance. Only fields specified in
+            update_mask are updated.
+    """
+
+    update_mask = proto.Field(proto.MESSAGE, number=1, message=field_mask.FieldMask,)
+
+    instance = proto.Field(proto.MESSAGE, number=2, message="Instance",)
+
+
+class DeleteInstanceRequest(proto.Message):
+    r"""Request for
+    [DeleteInstance][google.cloud.memcache.v1.CloudMemcache.DeleteInstance].
+
+    Attributes:
+        name (str):
+            Required. Memcached instance resource name in the format:
+            ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+            where ``location_id`` refers to a GCP region
+    """
+
+    name = proto.Field(proto.STRING, number=1)
+
+
+class ApplyParametersRequest(proto.Message):
+    r"""Request for
+    [ApplyParameters][google.cloud.memcache.v1.CloudMemcache.ApplyParameters].
+
+    Attributes:
+        name (str):
+            Required. Resource name of the Memcached
+            instance for which parameter group updates
+            should be applied.
+        node_ids (Sequence[str]):
+            Nodes to which we should apply the instance-
+            level parameter group.
+        apply_all (bool):
+            Whether to apply instance-level parameter
+            group to all nodes. If set to true, will
+            explicitly restrict users from specifying any
+            nodes, and apply parameter group updates to all
+            nodes within the instance.
+ """ + + name = proto.Field(proto.STRING, number=1) + + node_ids = proto.RepeatedField(proto.STRING, number=2) + + apply_all = proto.Field(proto.BOOL, number=3) + + +class UpdateParametersRequest(proto.Message): + r"""Request for + [UpdateParameters][google.cloud.memcache.v1.CloudMemcache.UpdateParameters]. + + Attributes: + name (str): + Required. Resource name of the Memcached + instance for which the parameters should be + updated. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + parameters (google.cloud.memcache_v1.types.MemcacheParameters): + The parameters to apply to the instance. + """ + + name = proto.Field(proto.STRING, number=1) + + update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + + parameters = proto.Field(proto.MESSAGE, number=3, message="MemcacheParameters",) + + +class MemcacheParameters(proto.Message): + r""" + + Attributes: + id (str): + Output only. The unique ID associated with + this set of parameters. Users can use this id to + determine if the parameters associated with the + instance differ from the parameters associated + with the nodes and any action needs to be taken + to apply parameters on nodes. + params (Sequence[google.cloud.memcache_v1.types.MemcacheParameters.ParamsEntry]): + User defined set of parameters to use in the + memcached process. + """ + + id = proto.Field(proto.STRING, number=1) + + params = proto.MapField(proto.STRING, proto.STRING, number=3) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of a long-running operation. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. 
Name of the verb executed by the + operation. + status_detail (str): + Output only. Human-readable status of the + operation, if any. + cancel_requested (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have + successfully been cancelled have [Operation.error][] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. + """ + + create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) + + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) + + target = proto.Field(proto.STRING, number=3) + + verb = proto.Field(proto.STRING, number=4) + + status_detail = proto.Field(proto.STRING, number=5) + + cancel_requested = proto.Field(proto.BOOL, number=6) + + api_version = proto.Field(proto.STRING, number=7) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/memcache_v1beta2/__init__.py b/google/cloud/memcache_v1beta2/__init__.py index 13cd09f..bac2393 100644 --- a/google/cloud/memcache_v1beta2/__init__.py +++ b/google/cloud/memcache_v1beta2/__init__.py @@ -17,6 +17,7 @@ from .services.cloud_memcache import CloudMemcacheClient from .types.cloud_memcache import ApplyParametersRequest +from .types.cloud_memcache import ApplySoftwareUpdateRequest from .types.cloud_memcache import CreateInstanceRequest from .types.cloud_memcache import DeleteInstanceRequest from .types.cloud_memcache import GetInstanceRequest @@ -34,6 +35,7 @@ __all__ = ( "ApplyParametersRequest", + "ApplySoftwareUpdateRequest", "CreateInstanceRequest", "DeleteInstanceRequest", "GetInstanceRequest", diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py index 9373a06..d0d7b85 100644 --- 
a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py @@ -58,8 +58,7 @@ class CloudMemcacheAsyncClient: - As such, Memcached instances are resources of the form: ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - Note that location_id must be refering to a GCP ``region``; for - example: + Note that location_id must be a GCP ``region``; for example: - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached`` """ @@ -101,6 +100,7 @@ class CloudMemcacheAsyncClient: CloudMemcacheClient.parse_common_location_path ) + from_service_account_info = CloudMemcacheClient.from_service_account_info from_service_account_file = CloudMemcacheClient.from_service_account_file from_service_account_json = from_service_account_file @@ -174,10 +174,10 @@ async def list_instances( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesAsyncPager: - r"""Lists Instances in a given project and location. + r"""Lists Instances in a given location. Args: - request (:class:`~.cloud_memcache.ListInstancesRequest`): + request (:class:`google.cloud.memcache_v1beta2.types.ListInstancesRequest`): The request object. Request for [ListInstances][google.cloud.memcache.v1beta2.CloudMemcache.ListInstances]. parent (:class:`str`): @@ -185,6 +185,7 @@ async def list_instances( using the form: ``projects/{project_id}/locations/{location_id}`` where ``location_id`` refers to a GCP region + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -196,7 +197,7 @@ async def list_instances( sent along with the request as metadata. Returns: - ~.pagers.ListInstancesAsyncPager: + google.cloud.memcache_v1beta2.services.cloud_memcache.pagers.ListInstancesAsyncPager: Response for [ListInstances][google.cloud.memcache.v1beta2.CloudMemcache.ListInstances]. 
@@ -260,7 +261,7 @@ async def get_instance( r"""Gets details of a single Instance. Args: - request (:class:`~.cloud_memcache.GetInstanceRequest`): + request (:class:`google.cloud.memcache_v1beta2.types.GetInstanceRequest`): The request object. Request for [GetInstance][google.cloud.memcache.v1beta2.CloudMemcache.GetInstance]. name (:class:`str`): @@ -268,6 +269,7 @@ async def get_instance( format: ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` where ``location_id`` refers to a GCP region + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -279,8 +281,8 @@ async def get_instance( sent along with the request as metadata. Returns: - ~.cloud_memcache.Instance: - + google.cloud.memcache_v1beta2.types.Instance: + A Memorystore for Memcached instance """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have @@ -331,11 +333,10 @@ async def create_instance( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Creates a new Instance in a given project and - location. + r"""Creates a new Instance in a given location. Args: - request (:class:`~.cloud_memcache.CreateInstanceRequest`): + request (:class:`google.cloud.memcache_v1beta2.types.CreateInstanceRequest`): The request object. Request for [CreateInstance][google.cloud.memcache.v1beta2.CloudMemcache.CreateInstance]. parent (:class:`str`): @@ -343,6 +344,7 @@ async def create_instance( using the form: ``projects/{project_id}/locations/{location_id}`` where ``location_id`` refers to a GCP region + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -355,12 +357,15 @@ async def create_instance( - Must start with a letter. - Must be between 1-40 characters. - Must end with a number or a letter. 
- - Must be unique within the user project / location + - Must be unique within the user project / location. + + If any of the above are not met, the API raises an + invalid argument error. This corresponds to the ``instance_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - resource (:class:`~.cloud_memcache.Instance`): + resource (:class:`google.cloud.memcache_v1beta2.types.Instance`): Required. A Memcached [Instance] resource This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this @@ -373,11 +378,12 @@ async def create_instance( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. The result type for the operation will be - :class:``~.cloud_memcache.Instance``: + :class:`google.cloud.memcache_v1beta2.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. @@ -444,20 +450,22 @@ async def update_instance( location. Args: - request (:class:`~.cloud_memcache.UpdateInstanceRequest`): + request (:class:`google.cloud.memcache_v1beta2.types.UpdateInstanceRequest`): The request object. Request for [UpdateInstance][google.cloud.memcache.v1beta2.CloudMemcache.UpdateInstance]. - update_mask (:class:`~.field_mask.FieldMask`): + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Required. Mask of fields to update. - ``displayName`` + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - resource (:class:`~.cloud_memcache.Instance`): + resource (:class:`google.cloud.memcache_v1beta2.types.Instance`): Required. A Memcached [Instance] resource. Only fields specified in update_mask are updated. + This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
@@ -469,11 +477,12 @@ async def update_instance( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. The result type for the operation will be - :class:``~.cloud_memcache.Instance``: + :class:`google.cloud.memcache_v1beta2.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. @@ -537,30 +546,32 @@ async def update_parameters( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Updates the defined Memcached Parameters for an - existing Instance. This method only stages the - parameters, it must be followed by ApplyParameters to - apply the parameters to nodes of the Memcached Instance. + r"""Updates the defined Memcached parameters for an existing + instance. This method only stages the parameters, it must be + followed by ``ApplyParameters`` to apply the parameters to nodes + of the Memcached instance. Args: - request (:class:`~.cloud_memcache.UpdateParametersRequest`): + request (:class:`google.cloud.memcache_v1beta2.types.UpdateParametersRequest`): The request object. Request for [UpdateParameters][google.cloud.memcache.v1beta2.CloudMemcache.UpdateParameters]. name (:class:`str`): Required. Resource name of the Memcached instance for which the parameters should be updated. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - update_mask (:class:`~.field_mask.FieldMask`): + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Required. Mask of fields to update. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- parameters (:class:`~.cloud_memcache.MemcacheParameters`): + parameters (:class:`google.cloud.memcache_v1beta2.types.MemcacheParameters`): The parameters to apply to the instance. + This corresponds to the ``parameters`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -572,11 +583,12 @@ async def update_parameters( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. The result type for the operation will be - :class:``~.cloud_memcache.Instance``: + :class:`google.cloud.memcache_v1beta2.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. @@ -641,13 +653,15 @@ async def delete_instance( r"""Deletes a single Instance. Args: - request (:class:`~.cloud_memcache.DeleteInstanceRequest`): + request (:class:`google.cloud.memcache_v1beta2.types.DeleteInstanceRequest`): The request object. Request for [DeleteInstance][google.cloud.memcache.v1beta2.CloudMemcache.DeleteInstance]. name (:class:`str`): - Memcached instance resource name in the format: + Required. Memcached instance resource name in the + format: ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` where ``location_id`` refers to a GCP region + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -659,24 +673,22 @@ async def delete_instance( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.empty.Empty``: A generic empty message that - you can re-use to avoid defining duplicated empty - messages in your APIs. A typical example is to use it as - the request or the response type of an API method. 
For - instance: + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: - :: + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); - service Foo { - rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); - } + } - The JSON representation for ``Empty`` is empty JSON - object ``{}``. + The JSON representation for Empty is empty JSON + object {}. """ # Create or coerce a protobuf request object. @@ -736,33 +748,35 @@ async def apply_parameters( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""ApplyParameters will update current set of Parameters - to the set of specified nodes of the Memcached Instance. + r"""``ApplyParameters`` restarts the set of specified nodes in order + to update them to the current set of parameters for the + Memcached Instance. Args: - request (:class:`~.cloud_memcache.ApplyParametersRequest`): + request (:class:`google.cloud.memcache_v1beta2.types.ApplyParametersRequest`): The request object. Request for [ApplyParameters][google.cloud.memcache.v1beta2.CloudMemcache.ApplyParameters]. name (:class:`str`): Required. Resource name of the Memcached instance for which parameter group updates should be applied. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. node_ids (:class:`Sequence[str]`): - Nodes to which we should apply the - instance-level parameter group. + Nodes to which the instance-level + parameter group is applied. + This corresponds to the ``node_ids`` field on the ``request`` instance; if ``request`` is provided, this should not be set. apply_all (:class:`bool`): - Whether to apply instance-level - parameter group to all nodes. 
If set to - true, will explicitly restrict users - from specifying any nodes, and apply - parameter group updates to all nodes - within the instance. + Whether to apply instance-level parameter group to all + nodes. If set to true, users are restricted from + specifying individual nodes, and ``ApplyParameters`` + updates all nodes within the instance. + This corresponds to the ``apply_all`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -774,11 +788,12 @@ async def apply_parameters( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. The result type for the operation will be - :class:``~.cloud_memcache.Instance``: + :class:`google.cloud.memcache_v1beta2.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. @@ -832,6 +847,118 @@ async def apply_parameters( # Done; return the response. return response + async def apply_software_update( + self, + request: cloud_memcache.ApplySoftwareUpdateRequest = None, + *, + instance: str = None, + node_ids: Sequence[str] = None, + apply_all: bool = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates software on the selected nodes of the + Instance. + + Args: + request (:class:`google.cloud.memcache_v1beta2.types.ApplySoftwareUpdateRequest`): + The request object. Request for + [ApplySoftwareUpdate][google.cloud.memcache.v1beta2.CloudMemcache.ApplySoftwareUpdate]. + instance (:class:`str`): + Required. Resource name of the + Memcached instance for which software + update should be applied. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ node_ids (:class:`Sequence[str]`): + Nodes to which we should apply the + update to. Note all the selected nodes + are updated in parallel. + + This corresponds to the ``node_ids`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + apply_all (:class:`bool`): + Whether to apply the update to all + nodes. If set to true, will explicitly + restrict users from specifying any + nodes, and apply software update to all + nodes (where applicable) within the + instance. + + This corresponds to the ``apply_all`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memcache_v1beta2.types.Instance` A + Memorystore for Memcached instance + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, node_ids, apply_all]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloud_memcache.ApplySoftwareUpdateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ + if instance is not None: + request.instance = instance + if apply_all is not None: + request.apply_all = apply_all + + if node_ids: + request.node_ids.extend(node_ids) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.apply_software_update, + default_timeout=1200.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("instance", request.instance),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. + return response + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index 0a1ef7c..0da8b35 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -93,8 +93,7 @@ class CloudMemcacheClient(metaclass=CloudMemcacheClientMeta): - As such, Memcached instances are resources of the form: ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - Note that location_id must be refering to a GCP ``region``; for - example: + Note that location_id must be a GCP ``region``; for example: - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached`` """ @@ -133,6 +132,22 @@ def _get_default_mtls_endpoint(api_endpoint): DEFAULT_ENDPOINT ) + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): 
+ """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudMemcacheClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -145,7 +160,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + CloudMemcacheClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -253,10 +268,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.CloudMemcacheTransport]): The + transport (Union[str, CloudMemcacheTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (client_options_lib.ClientOptions): Custom options for the + client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT @@ -292,21 +307,17 @@ def __init__( util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) ) - ssl_credentials = None + client_cert_source_func = None is_mtls = False if use_client_cert: if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) is_mtls = True + client_cert_source_func = client_options.client_cert_source else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None + is_mtls = mtls.has_default_client_cert_source() + client_cert_source_func = ( + mtls.default_client_cert_source() if is_mtls else None + ) # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -349,7 +360,7 @@ def __init__( credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, - ssl_channel_credentials=ssl_credentials, + client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, ) @@ -363,17 +374,18 @@ def list_instances( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesPager: - r"""Lists Instances in a given project and location. + r"""Lists Instances in a given location. Args: - request (:class:`~.cloud_memcache.ListInstancesRequest`): + request (google.cloud.memcache_v1beta2.types.ListInstancesRequest): The request object. Request for [ListInstances][google.cloud.memcache.v1beta2.CloudMemcache.ListInstances]. - parent (:class:`str`): + parent (str): Required. The resource name of the instance location using the form: ``projects/{project_id}/locations/{location_id}`` where ``location_id`` refers to a GCP region + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
@@ -385,7 +397,7 @@ def list_instances( sent along with the request as metadata. Returns: - ~.pagers.ListInstancesPager: + google.cloud.memcache_v1beta2.services.cloud_memcache.pagers.ListInstancesPager: Response for [ListInstances][google.cloud.memcache.v1beta2.CloudMemcache.ListInstances]. @@ -450,14 +462,15 @@ def get_instance( r"""Gets details of a single Instance. Args: - request (:class:`~.cloud_memcache.GetInstanceRequest`): + request (google.cloud.memcache_v1beta2.types.GetInstanceRequest): The request object. Request for [GetInstance][google.cloud.memcache.v1beta2.CloudMemcache.GetInstance]. - name (:class:`str`): + name (str): Required. Memcached instance resource name in the format: ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` where ``location_id`` refers to a GCP region + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -469,8 +482,8 @@ def get_instance( sent along with the request as metadata. Returns: - ~.cloud_memcache.Instance: - + google.cloud.memcache_v1beta2.types.Instance: + A Memorystore for Memcached instance """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have @@ -522,22 +535,22 @@ def create_instance( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Creates a new Instance in a given project and - location. + r"""Creates a new Instance in a given location. Args: - request (:class:`~.cloud_memcache.CreateInstanceRequest`): + request (google.cloud.memcache_v1beta2.types.CreateInstanceRequest): The request object. Request for [CreateInstance][google.cloud.memcache.v1beta2.CloudMemcache.CreateInstance]. - parent (:class:`str`): + parent (str): Required. 
The resource name of the instance location using the form: ``projects/{project_id}/locations/{location_id}`` where ``location_id`` refers to a GCP region + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - instance_id (:class:`str`): + instance_id (str): Required. The logical name of the Memcached instance in the user project with the following restrictions: @@ -546,12 +559,15 @@ def create_instance( - Must start with a letter. - Must be between 1-40 characters. - Must end with a number or a letter. - - Must be unique within the user project / location + - Must be unique within the user project / location. + + If any of the above are not met, the API raises an + invalid argument error. This corresponds to the ``instance_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - resource (:class:`~.cloud_memcache.Instance`): + resource (google.cloud.memcache_v1beta2.types.Instance): Required. A Memcached [Instance] resource This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this @@ -564,11 +580,12 @@ def create_instance( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. The result type for the operation will be - :class:``~.cloud_memcache.Instance``: + :class:`google.cloud.memcache_v1beta2.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. @@ -636,20 +653,22 @@ def update_instance( location. Args: - request (:class:`~.cloud_memcache.UpdateInstanceRequest`): + request (google.cloud.memcache_v1beta2.types.UpdateInstanceRequest): The request object. Request for [UpdateInstance][google.cloud.memcache.v1beta2.CloudMemcache.UpdateInstance]. 
- update_mask (:class:`~.field_mask.FieldMask`): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Mask of fields to update. - ``displayName`` + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - resource (:class:`~.cloud_memcache.Instance`): + resource (google.cloud.memcache_v1beta2.types.Instance): Required. A Memcached [Instance] resource. Only fields specified in update_mask are updated. + This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -661,11 +680,12 @@ def update_instance( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. The result type for the operation will be - :class:``~.cloud_memcache.Instance``: + :class:`google.cloud.memcache_v1beta2.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. @@ -730,30 +750,32 @@ def update_parameters( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Updates the defined Memcached Parameters for an - existing Instance. This method only stages the - parameters, it must be followed by ApplyParameters to - apply the parameters to nodes of the Memcached Instance. + r"""Updates the defined Memcached parameters for an existing + instance. This method only stages the parameters, it must be + followed by ``ApplyParameters`` to apply the parameters to nodes + of the Memcached instance. Args: - request (:class:`~.cloud_memcache.UpdateParametersRequest`): + request (google.cloud.memcache_v1beta2.types.UpdateParametersRequest): The request object. Request for [UpdateParameters][google.cloud.memcache.v1beta2.CloudMemcache.UpdateParameters]. - name (:class:`str`): + name (str): Required. 
Resource name of the Memcached instance for which the parameters should be updated. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - update_mask (:class:`~.field_mask.FieldMask`): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Mask of fields to update. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - parameters (:class:`~.cloud_memcache.MemcacheParameters`): + parameters (google.cloud.memcache_v1beta2.types.MemcacheParameters): The parameters to apply to the instance. + This corresponds to the ``parameters`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -765,11 +787,12 @@ def update_parameters( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. The result type for the operation will be - :class:``~.cloud_memcache.Instance``: + :class:`google.cloud.memcache_v1beta2.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. @@ -835,13 +858,15 @@ def delete_instance( r"""Deletes a single Instance. Args: - request (:class:`~.cloud_memcache.DeleteInstanceRequest`): + request (google.cloud.memcache_v1beta2.types.DeleteInstanceRequest): The request object. Request for [DeleteInstance][google.cloud.memcache.v1beta2.CloudMemcache.DeleteInstance]. - name (:class:`str`): - Memcached instance resource name in the format: + name (str): + Required. Memcached instance resource name in the + format: ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` where ``location_id`` refers to a GCP region + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
@@ -853,24 +878,22 @@ def delete_instance( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.empty.Empty``: A generic empty message that - you can re-use to avoid defining duplicated empty - messages in your APIs. A typical example is to use it as - the request or the response type of an API method. For - instance: + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: - :: + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); - service Foo { - rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); - } + } - The JSON representation for ``Empty`` is empty JSON - object ``{}``. + The JSON representation for Empty is empty JSON + object {}. """ # Create or coerce a protobuf request object. @@ -931,33 +954,35 @@ def apply_parameters( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""ApplyParameters will update current set of Parameters - to the set of specified nodes of the Memcached Instance. + r"""``ApplyParameters`` restarts the set of specified nodes in order + to update them to the current set of parameters for the + Memcached Instance. Args: - request (:class:`~.cloud_memcache.ApplyParametersRequest`): + request (google.cloud.memcache_v1beta2.types.ApplyParametersRequest): The request object. Request for [ApplyParameters][google.cloud.memcache.v1beta2.CloudMemcache.ApplyParameters]. - name (:class:`str`): + name (str): Required. Resource name of the Memcached instance for which parameter group updates should be applied. 
+ This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - node_ids (:class:`Sequence[str]`): - Nodes to which we should apply the - instance-level parameter group. + node_ids (Sequence[str]): + Nodes to which the instance-level + parameter group is applied. + This corresponds to the ``node_ids`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - apply_all (:class:`bool`): - Whether to apply instance-level - parameter group to all nodes. If set to - true, will explicitly restrict users - from specifying any nodes, and apply - parameter group updates to all nodes - within the instance. + apply_all (bool): + Whether to apply instance-level parameter group to all + nodes. If set to true, users are restricted from + specifying individual nodes, and ``ApplyParameters`` + updates all nodes within the instance. + This corresponds to the ``apply_all`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -969,11 +994,12 @@ def apply_parameters( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. The result type for the operation will be - :class:``~.cloud_memcache.Instance``: + :class:`google.cloud.memcache_v1beta2.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. @@ -998,12 +1024,11 @@ def apply_parameters( if name is not None: request.name = name + if node_ids is not None: + request.node_ids = node_ids if apply_all is not None: request.apply_all = apply_all - if node_ids: - request.node_ids.extend(node_ids) - # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.apply_parameters] @@ -1028,6 +1053,118 @@ def apply_parameters( # Done; return the response. 
return response + def apply_software_update( + self, + request: cloud_memcache.ApplySoftwareUpdateRequest = None, + *, + instance: str = None, + node_ids: Sequence[str] = None, + apply_all: bool = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates software on the selected nodes of the + Instance. + + Args: + request (google.cloud.memcache_v1beta2.types.ApplySoftwareUpdateRequest): + The request object. Request for + [ApplySoftwareUpdate][google.cloud.memcache.v1beta2.CloudMemcache.ApplySoftwareUpdate]. + instance (str): + Required. Resource name of the + Memcached instance for which software + update should be applied. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_ids (Sequence[str]): + Nodes to which we should apply the + update to. Note all the selected nodes + are updated in parallel. + + This corresponds to the ``node_ids`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + apply_all (bool): + Whether to apply the update to all + nodes. If set to true, will explicitly + restrict users from specifying any + nodes, and apply software update to all + nodes (where applicable) within the + instance. + + This corresponds to the ``apply_all`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.memcache_v1beta2.types.Instance` A + Memorystore for Memcached instance + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, node_ids, apply_all]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_memcache.ApplySoftwareUpdateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_memcache.ApplySoftwareUpdateRequest): + request = cloud_memcache.ApplySoftwareUpdateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if instance is not None: + request.instance = instance + if node_ids is not None: + request.node_ids = node_ids + if apply_all is not None: + request.apply_all = apply_all + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.apply_software_update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("instance", request.instance),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. 
+ return response + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py index 7e7696a..5b69afd 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py @@ -15,7 +15,16 @@ # limitations under the License. # -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Iterable, + Sequence, + Tuple, + Optional, +) from google.cloud.memcache_v1beta2.types import cloud_memcache @@ -24,7 +33,7 @@ class ListInstancesPager: """A pager for iterating through ``list_instances`` requests. This class thinly wraps an initial - :class:`~.cloud_memcache.ListInstancesResponse` object, and + :class:`google.cloud.memcache_v1beta2.types.ListInstancesResponse` object, and provides an ``__iter__`` method to iterate through its ``resources`` field. @@ -33,7 +42,7 @@ class ListInstancesPager: through the ``resources`` field on the corresponding responses. - All the usual :class:`~.cloud_memcache.ListInstancesResponse` + All the usual :class:`google.cloud.memcache_v1beta2.types.ListInstancesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -51,9 +60,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.cloud_memcache.ListInstancesRequest`): + request (google.cloud.memcache_v1beta2.types.ListInstancesRequest): The initial request object. - response (:class:`~.cloud_memcache.ListInstancesResponse`): + response (google.cloud.memcache_v1beta2.types.ListInstancesResponse): The initial response object. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -86,7 +95,7 @@ class ListInstancesAsyncPager: """A pager for iterating through ``list_instances`` requests. This class thinly wraps an initial - :class:`~.cloud_memcache.ListInstancesResponse` object, and + :class:`google.cloud.memcache_v1beta2.types.ListInstancesResponse` object, and provides an ``__aiter__`` method to iterate through its ``resources`` field. @@ -95,7 +104,7 @@ class ListInstancesAsyncPager: through the ``resources`` field on the corresponding responses. - All the usual :class:`~.cloud_memcache.ListInstancesResponse` + All the usual :class:`google.cloud.memcache_v1beta2.types.ListInstancesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -113,9 +122,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.cloud_memcache.ListInstancesRequest`): + request (google.cloud.memcache_v1beta2.types.ListInstancesRequest): The initial request object. - response (:class:`~.cloud_memcache.ListInstancesResponse`): + response (google.cloud.memcache_v1beta2.types.ListInstancesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py index 3a96e70..2c598e4 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py @@ -127,6 +127,11 @@ def _prep_wrapped_messages(self, client_info): self.apply_parameters: gapic_v1.method.wrap_method( self.apply_parameters, default_timeout=1200.0, client_info=client_info, ), + self.apply_software_update: gapic_v1.method.wrap_method( + self.apply_software_update, + default_timeout=1200.0, + client_info=client_info, + ), } @property @@ -202,5 +207,14 @@ def apply_parameters( ]: raise NotImplementedError() + @property + def apply_software_update( + self, + ) -> typing.Callable[ + [cloud_memcache.ApplySoftwareUpdateRequest], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + ]: + raise NotImplementedError() + __all__ = ("CloudMemcacheTransport",) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py index 09e1170..d1ffc02 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py @@ -52,8 +52,7 @@ class CloudMemcacheGrpcTransport(CloudMemcacheTransport): - As such, Memcached instances are resources of the form: ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - Note that location_id must be refering to a GCP ``region``; for - example: + Note that location_id must be a GCP ``region``; for example: - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached`` @@ -78,6 +77,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: 
grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -108,6 +108,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -124,6 +128,11 @@ def __init__( """ self._ssl_channel_credentials = ssl_channel_credentials + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -133,11 +142,6 @@ def __init__( self._grpc_channel = channel self._ssl_channel_credentials = None elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( api_mtls_endpoint if ":" in api_mtls_endpoint @@ -181,12 +185,18 @@ def __init__( scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id ) + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + # create a new channel. The provided one is ignored. 
self._grpc_channel = type(self).create_channel( host, credentials=credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, + ssl_credentials=self._ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ @@ -281,7 +291,7 @@ def list_instances( ]: r"""Return a callable for the list instances method over gRPC. - Lists Instances in a given project and location. + Lists Instances in a given location. Returns: Callable[[~.ListInstancesRequest], @@ -333,8 +343,7 @@ def create_instance( ) -> Callable[[cloud_memcache.CreateInstanceRequest], operations.Operation]: r"""Return a callable for the create instance method over gRPC. - Creates a new Instance in a given project and - location. + Creates a new Instance in a given location. Returns: Callable[[~.CreateInstanceRequest], @@ -387,10 +396,10 @@ def update_parameters( ) -> Callable[[cloud_memcache.UpdateParametersRequest], operations.Operation]: r"""Return a callable for the update parameters method over gRPC. - Updates the defined Memcached Parameters for an - existing Instance. This method only stages the - parameters, it must be followed by ApplyParameters to - apply the parameters to nodes of the Memcached Instance. + Updates the defined Memcached parameters for an existing + instance. This method only stages the parameters, it must be + followed by ``ApplyParameters`` to apply the parameters to nodes + of the Memcached instance. Returns: Callable[[~.UpdateParametersRequest], @@ -442,8 +451,9 @@ def apply_parameters( ) -> Callable[[cloud_memcache.ApplyParametersRequest], operations.Operation]: r"""Return a callable for the apply parameters method over gRPC. - ApplyParameters will update current set of Parameters - to the set of specified nodes of the Memcached Instance. + ``ApplyParameters`` restarts the set of specified nodes in order + to update them to the current set of parameters for the + Memcached Instance. 
Returns: Callable[[~.ApplyParametersRequest], @@ -463,5 +473,32 @@ def apply_parameters( ) return self._stubs["apply_parameters"] + @property + def apply_software_update( + self, + ) -> Callable[[cloud_memcache.ApplySoftwareUpdateRequest], operations.Operation]: + r"""Return a callable for the apply software update method over gRPC. + + Updates software on the selected nodes of the + Instance. + + Returns: + Callable[[~.ApplySoftwareUpdateRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "apply_software_update" not in self._stubs: + self._stubs["apply_software_update"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1beta2.CloudMemcache/ApplySoftwareUpdate", + request_serializer=cloud_memcache.ApplySoftwareUpdateRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["apply_software_update"] + __all__ = ("CloudMemcacheGrpcTransport",) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py index 009acf5..24f0f89 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py @@ -54,8 +54,7 @@ class CloudMemcacheGrpcAsyncIOTransport(CloudMemcacheTransport): - As such, Memcached instances are resources of the form: ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - Note that location_id must be refering to a GCP ``region``; for - example: + Note that location_id must be a GCP ``region``; for example: - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached`` @@ -122,6 +121,7 @@ 
def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -153,6 +153,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -169,6 +173,11 @@ def __init__( """ self._ssl_channel_credentials = ssl_channel_credentials + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -178,11 +187,6 @@ def __init__( self._grpc_channel = channel self._ssl_channel_credentials = None elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( api_mtls_endpoint if ":" in api_mtls_endpoint @@ -226,12 +230,18 @@ def __init__( scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id ) + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + # create a new channel. 
The provided one is ignored. self._grpc_channel = type(self).create_channel( host, credentials=credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, + ssl_credentials=self._ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ @@ -288,7 +298,7 @@ def list_instances( ]: r"""Return a callable for the list instances method over gRPC. - Lists Instances in a given project and location. + Lists Instances in a given location. Returns: Callable[[~.ListInstancesRequest], @@ -344,8 +354,7 @@ def create_instance( ]: r"""Return a callable for the create instance method over gRPC. - Creates a new Instance in a given project and - location. + Creates a new Instance in a given location. Returns: Callable[[~.CreateInstanceRequest], @@ -402,10 +411,10 @@ def update_parameters( ]: r"""Return a callable for the update parameters method over gRPC. - Updates the defined Memcached Parameters for an - existing Instance. This method only stages the - parameters, it must be followed by ApplyParameters to - apply the parameters to nodes of the Memcached Instance. + Updates the defined Memcached parameters for an existing + instance. This method only stages the parameters, it must be + followed by ``ApplyParameters`` to apply the parameters to nodes + of the Memcached instance. Returns: Callable[[~.UpdateParametersRequest], @@ -461,8 +470,9 @@ def apply_parameters( ]: r"""Return a callable for the apply parameters method over gRPC. - ApplyParameters will update current set of Parameters - to the set of specified nodes of the Memcached Instance. + ``ApplyParameters`` restarts the set of specified nodes in order + to update them to the current set of parameters for the + Memcached Instance. 
Returns: Callable[[~.ApplyParametersRequest], @@ -482,5 +492,34 @@ def apply_parameters( ) return self._stubs["apply_parameters"] + @property + def apply_software_update( + self, + ) -> Callable[ + [cloud_memcache.ApplySoftwareUpdateRequest], Awaitable[operations.Operation] + ]: + r"""Return a callable for the apply software update method over gRPC. + + Updates software on the selected nodes of the + Instance. + + Returns: + Callable[[~.ApplySoftwareUpdateRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "apply_software_update" not in self._stubs: + self._stubs["apply_software_update"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1beta2.CloudMemcache/ApplySoftwareUpdate", + request_serializer=cloud_memcache.ApplySoftwareUpdateRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["apply_software_update"] + __all__ = ("CloudMemcacheGrpcAsyncIOTransport",) diff --git a/google/cloud/memcache_v1beta2/types/__init__.py b/google/cloud/memcache_v1beta2/types/__init__.py index 8619a5a..9fa6727 100644 --- a/google/cloud/memcache_v1beta2/types/__init__.py +++ b/google/cloud/memcache_v1beta2/types/__init__.py @@ -25,6 +25,7 @@ DeleteInstanceRequest, ApplyParametersRequest, UpdateParametersRequest, + ApplySoftwareUpdateRequest, MemcacheParameters, OperationMetadata, LocationMetadata, @@ -42,6 +43,7 @@ "DeleteInstanceRequest", "ApplyParametersRequest", "UpdateParametersRequest", + "ApplySoftwareUpdateRequest", "MemcacheParameters", "OperationMetadata", "LocationMetadata", diff --git a/google/cloud/memcache_v1beta2/types/cloud_memcache.py b/google/cloud/memcache_v1beta2/types/cloud_memcache.py index 36cd39e..b7e9a43 100644 --- 
a/google/cloud/memcache_v1beta2/types/cloud_memcache.py +++ b/google/cloud/memcache_v1beta2/types/cloud_memcache.py @@ -35,6 +35,7 @@ "DeleteInstanceRequest", "ApplyParametersRequest", "UpdateParametersRequest", + "ApplySoftwareUpdateRequest", "MemcacheParameters", "OperationMetadata", "LocationMetadata", @@ -50,7 +51,7 @@ class MemcacheVersion(proto.Enum): class Instance(proto.Message): - r""" + r"""A Memorystore for Memcached instance Attributes: name (str): @@ -58,57 +59,59 @@ class Instance(proto.Message): including project and location using the form: ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - Note: Memcached instances are managed and addressed at - regional level so location_id here refers to a GCP region; - however, users may choose which zones Memcached nodes within - an instances should be provisioned in. Refer to [zones] - field for more details. + Note: Memcached instances are managed and addressed at the + regional level so ``location_id`` here refers to a Google + Cloud region; however, users may choose which zones + Memcached nodes should be provisioned in within an instance. + Refer to + [zones][google.cloud.memcache.v1beta2.Instance.zones] field + for more details. display_name (str): - Optional. User provided name for the instance + User provided name for the instance, which is only used for display purposes. Cannot be more than 80 characters. - labels (Sequence[~.cloud_memcache.Instance.LabelsEntry]): - Optional. Resource labels to represent user- - rovided metadata. Refer to cloud documentation - on labels for more details. + labels (Sequence[google.cloud.memcache_v1beta2.types.Instance.LabelsEntry]): + Resource labels to represent user-provided + metadata. Refer to cloud documentation on labels + for more details. https://cloud.google.com/compute/docs/labeling- resources authorized_network (str): - Optional. 
The full name of the Google Compute Engine + The full name of the Google Compute Engine `network `__ to which the instance is connected. If left unspecified, the ``default`` network will be used. zones (Sequence[str]): - Optional. Zones where Memcached nodes should - be provisioned in. Memcached nodes will be - equally distributed across these zones. If not - provided, the service will by default create - nodes in all zones in the region for the - instance. + Zones in which Memcached nodes should be + provisioned. Memcached nodes will be equally + distributed across these zones. If not provided, + the service will by default create nodes in all + zones in the region for the instance. node_count (int): Required. Number of nodes in the Memcached instance. - node_config (~.cloud_memcache.Instance.NodeConfig): + node_config (google.cloud.memcache_v1beta2.types.Instance.NodeConfig): Required. Configuration for Memcached nodes. - memcache_version (~.cloud_memcache.MemcacheVersion): - Optional. The major version of Memcached software. If not - provided, latest supported version will be used. Currently - the latest supported major version is MEMCACHE_1_5. The - minor version will be automatically determined by our system - based on the latest supported minor version. - parameters (~.cloud_memcache.MemcacheParameters): + memcache_version (google.cloud.memcache_v1beta2.types.MemcacheVersion): + The major version of Memcached software. If not provided, + latest supported version will be used. Currently the latest + supported major version is ``MEMCACHE_1_5``. The minor + version will be automatically determined by our system based + on the latest supported minor version. + parameters (google.cloud.memcache_v1beta2.types.MemcacheParameters): Optional: User defined parameters to apply to the memcached process on each node. - memcache_nodes (Sequence[~.cloud_memcache.Instance.Node]): - Output only. List of Memcached nodes. Refer to [Node] - message for more details. 
- create_time (~.timestamp.Timestamp): + memcache_nodes (Sequence[google.cloud.memcache_v1beta2.types.Instance.Node]): + Output only. List of Memcached nodes. Refer to + [Node][google.cloud.memcache.v1beta2.Instance.Node] message + for more details. + create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time the instance was created. - update_time (~.timestamp.Timestamp): + update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time the instance was updated. - state (~.cloud_memcache.Instance.State): + state (google.cloud.memcache_v1beta2.types.Instance.State): Output only. The state of this Memcached instance. memcache_full_version (str): @@ -119,17 +122,18 @@ class Instance(proto.Message): MemcacheVersion. The full version format will be "memcached-1.5.16". - instance_messages (Sequence[~.cloud_memcache.Instance.InstanceMessage]): - List of messages that describe current - statuses of memcached instance. + instance_messages (Sequence[google.cloud.memcache_v1beta2.types.Instance.InstanceMessage]): + List of messages that describe the current + state of the Memcached instance. discovery_endpoint (str): - Output only. Endpoint for Discovery API + Output only. Endpoint for the Discovery API. + update_available (bool): + Output only. Returns true if there is an + update waiting to be applied """ class State(proto.Enum): - r"""Different states of a Memcached instance. - LINT.IfChange - """ + r"""Different states of a Memcached instance.""" STATE_UNSPECIFIED = 0 CREATING = 1 READY = 2 @@ -162,7 +166,7 @@ class Node(proto.Message): zone (str): Output only. Location (GCP Zone) for the Memcached node. - state (~.cloud_memcache.Instance.Node.State): + state (google.cloud.memcache_v1beta2.types.Instance.Node.State): Output only. Current state of the Memcached node. host (str): @@ -172,15 +176,16 @@ class Node(proto.Message): port (int): Output only. The port number of the Memcached server on this node. 
- parameters (~.cloud_memcache.MemcacheParameters): + parameters (google.cloud.memcache_v1beta2.types.MemcacheParameters): User defined parameters currently applied to the node. + update_available (bool): + Output only. Returns true if there is an + update waiting to be applied """ class State(proto.Enum): - r"""Different states of a Memcached node. - LINT.IfChange - """ + r"""Different states of a Memcached node.""" STATE_UNSPECIFIED = 0 CREATING = 1 READY = 2 @@ -199,11 +204,13 @@ class State(proto.Enum): parameters = proto.Field(proto.MESSAGE, number=6, message="MemcacheParameters",) + update_available = proto.Field(proto.BOOL, number=7) + class InstanceMessage(proto.Message): r""" Attributes: - code (~.cloud_memcache.Instance.InstanceMessage.Code): + code (google.cloud.memcache_v1beta2.types.Instance.InstanceMessage.Code): A code that correspond to one type of user- acing message. message (str): @@ -254,6 +261,8 @@ class Code(proto.Enum): discovery_endpoint = proto.Field(proto.STRING, number=20) + update_available = proto.Field(proto.BOOL, number=21) + class ListInstancesRequest(proto.Message): r"""Request for @@ -268,18 +277,18 @@ class ListInstancesRequest(proto.Message): The maximum number of items to return. If not specified, a default value of 1000 will be used by - the service. Regardless of the page_size value, the response - may include a partial list and a caller should only rely on - response's - [next_page_token][CloudMemcache.ListInstancesResponse.next_page_token] + the service. Regardless of the ``page_size`` value, the + response may include a partial list and a caller should only + rely on response's + [``next_page_token``][google.cloud.memcache.v1beta2.ListInstancesResponse.next_page_token] to determine if there are more instances left to be queried. page_token (str): - The next_page_token value returned from a previous List + The ``next_page_token`` value returned from a previous List request, if any. filter (str): - List filter. 
For example, exclude all - Memcached instances with name as my-instance by - specifying "name != my-instance". + List filter. For example, exclude all Memcached instances + with name as my-instance by specifying + ``"name != my-instance"``. order_by (str): Sort results. Supported values are "name", "name desc" or "" (unsorted). @@ -301,7 +310,7 @@ class ListInstancesResponse(proto.Message): [ListInstances][google.cloud.memcache.v1beta2.CloudMemcache.ListInstances]. Attributes: - resources (Sequence[~.cloud_memcache.Instance]): + resources (Sequence[google.cloud.memcache_v1beta2.types.Instance]): A list of Memcached instances in the project in the specified location, or across all locations. @@ -359,8 +368,11 @@ class CreateInstanceRequest(proto.Message): - Must start with a letter. - Must be between 1-40 characters. - Must end with a number or a letter. - - Must be unique within the user project / location - resource (~.cloud_memcache.Instance): + - Must be unique within the user project / location. + + If any of the above are not met, the API raises an invalid + argument error. + resource (google.cloud.memcache_v1beta2.types.Instance): Required. A Memcached [Instance] resource """ @@ -376,11 +388,11 @@ class UpdateInstanceRequest(proto.Message): [UpdateInstance][google.cloud.memcache.v1beta2.CloudMemcache.UpdateInstance]. Attributes: - update_mask (~.field_mask.FieldMask): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Mask of fields to update. - ``displayName`` - resource (~.cloud_memcache.Instance): + resource (google.cloud.memcache_v1beta2.types.Instance): Required. A Memcached [Instance] resource. Only fields specified in update_mask are updated. """ @@ -396,7 +408,7 @@ class DeleteInstanceRequest(proto.Message): Attributes: name (str): - Memcached instance resource name in the format: + Required. 
Memcached instance resource name in the format: ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` where ``location_id`` refers to a GCP region """ @@ -414,14 +426,13 @@ class ApplyParametersRequest(proto.Message): instance for which parameter group updates should be applied. node_ids (Sequence[str]): - Nodes to which we should apply the instance- - evel parameter group. + Nodes to which the instance-level parameter + group is applied. apply_all (bool): - Whether to apply instance-level parameter - group to all nodes. If set to true, will - explicitly restrict users from specifying any - nodes, and apply parameter group updates to all - nodes within the instance. + Whether to apply instance-level parameter group to all + nodes. If set to true, users are restricted from specifying + individual nodes, and ``ApplyParameters`` updates all nodes + within the instance. """ name = proto.Field(proto.STRING, number=1) @@ -440,9 +451,9 @@ class UpdateParametersRequest(proto.Message): Required. Resource name of the Memcached instance for which the parameters should be updated. - update_mask (~.field_mask.FieldMask): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Mask of fields to update. - parameters (~.cloud_memcache.MemcacheParameters): + parameters (google.cloud.memcache_v1beta2.types.MemcacheParameters): The parameters to apply to the instance. """ @@ -453,18 +464,45 @@ class UpdateParametersRequest(proto.Message): parameters = proto.Field(proto.MESSAGE, number=3, message="MemcacheParameters",) +class ApplySoftwareUpdateRequest(proto.Message): + r"""Request for + [ApplySoftwareUpdate][google.cloud.memcache.v1beta2.CloudMemcache.ApplySoftwareUpdate]. + + Attributes: + instance (str): + Required. Resource name of the Memcached + instance for which software update should be + applied. + node_ids (Sequence[str]): + Nodes to which we should apply the update to. + Note all the selected nodes are updated in + parallel. 
+ apply_all (bool): + Whether to apply the update to all nodes. If + set to true, will explicitly restrict users from + specifying any nodes, and apply software update + to all nodes (where applicable) within the + instance. + """ + + instance = proto.Field(proto.STRING, number=1) + + node_ids = proto.RepeatedField(proto.STRING, number=2) + + apply_all = proto.Field(proto.BOOL, number=3) + + class MemcacheParameters(proto.Message): - r""" + r"""The unique ID associated with this set of parameters. Users + can use this id to determine if the parameters associated with + the instance differ from the parameters associated with the + nodes. A discrepancy between parameter ids can inform users that + they may need to take action to apply parameters on nodes. Attributes: id (str): - Output only. The unique ID associated with - this set of parameters. Users can use this id to - determine if the parameters associated with the - instance differ from the parameters associated - with the nodes and any action needs to be taken - to apply parameters on nodes. - params (Sequence[~.cloud_memcache.MemcacheParameters.ParamsEntry]): + Output only. + params (Sequence[google.cloud.memcache_v1beta2.types.MemcacheParameters.ParamsEntry]): User defined set of parameters to use in the memcached process. """ @@ -478,26 +516,30 @@ class OperationMetadata(proto.Message): r"""Represents the metadata of a long-running operation. Attributes: - create_time (~.timestamp.Timestamp): - Time when the operation was created. - end_time (~.timestamp.Timestamp): - Time when the operation finished running. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when the operation finished + running. target (str): - Server-defined resource path for the target - of the operation. + Output only. Server-defined resource path for + the target of the operation. 
verb (str): - Name of the verb executed by the operation. + Output only. Name of the verb executed by the + operation. status_detail (str): - Human-readable status of the operation, if - any. + Output only. Human-readable status of the + operation, if any. cancel_requested (bool): - Identifies whether the user has requested cancellation of - the operation. Operations that have successfully been - cancelled have [Operation.error][] value with a - [google.rpc.Status.code][google.rpc.Status.code] of 1, - corresponding to ``Code.CANCELLED``. + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have + successfully been cancelled have [Operation.error][] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. api_version (str): - API version used to start the operation. + Output only. API version used to start the + operation. """ create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) @@ -520,7 +562,7 @@ class LocationMetadata(proto.Message): [google.cloud.location.Location][google.cloud.location.Location]. Attributes: - available_zones (Sequence[~.cloud_memcache.LocationMetadata.AvailableZonesEntry]): + available_zones (Sequence[google.cloud.memcache_v1beta2.types.LocationMetadata.AvailableZonesEntry]): Output only. The set of available zones in the location. The map is keyed by the lowercase ID of each zone, as defined by GCE. 
These keys can be specified in the ``zones`` field when diff --git a/noxfile.py b/noxfile.py index 8004482..9e90799 100644 --- a/noxfile.py +++ b/noxfile.py @@ -30,6 +30,17 @@ SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + @nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): @@ -75,6 +86,7 @@ def default(session): session.install( "mock", "pytest", "pytest-cov", ) + session.install("-e", ".") # Run py.test against the unit tests. diff --git a/scripts/fixup_memcache_v1_keywords.py b/scripts/fixup_memcache_v1_keywords.py new file mode 100644 index 0000000..eac442a --- /dev/null +++ b/scripts/fixup_memcache_v1_keywords.py @@ -0,0 +1,185 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class memcacheCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'apply_parameters': ('name', 'node_ids', 'apply_all', ), + 'create_instance': ('parent', 'instance_id', 'instance', ), + 'delete_instance': ('name', ), + 'get_instance': ('name', ), + 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'update_instance': ('update_mask', 'instance', ), + 'update_parameters': ('name', 'update_mask', 'parameters', ), + + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), + cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=memcacheCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the memcache client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. 
+ +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/scripts/fixup_memcache_v1beta2_keywords.py b/scripts/fixup_memcache_v1beta2_keywords.py index 459ba63..4267b6f 100644 --- a/scripts/fixup_memcache_v1beta2_keywords.py +++ b/scripts/fixup_memcache_v1beta2_keywords.py @@ -42,6 +42,7 @@ class memcacheCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'apply_parameters': ('name', 'node_ids', 'apply_all', ), + 'apply_software_update': ('instance', 'node_ids', 'apply_all', ), 'create_instance': ('parent', 'instance_id', 
'resource', ), 'delete_instance': ('name', ), 'get_instance': ('name', ), diff --git a/synth.py b/synth.py index 4b2ffa1..65a7ca3 100644 --- a/synth.py +++ b/synth.py @@ -22,7 +22,9 @@ gapic = gcp.GAPICBazel() common = gcp.CommonTemplates() -versions = ["v1beta2"] +versions = ["v1beta2", + "v1", + ] # add new versions at the end of the list # ---------------------------------------------------------------------------- # Generate memcache GAPIC layer diff --git a/tests/unit/gapic/memcache_v1/__init__.py b/tests/unit/gapic/memcache_v1/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/tests/unit/gapic/memcache_v1/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py new file mode 100644 index 0000000..896e3b1 --- /dev/null +++ b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -0,0 +1,2575 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + +from google import auth +from google.api_core import client_options +from google.api_core import exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.auth import credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.memcache_v1.services.cloud_memcache import CloudMemcacheAsyncClient +from google.cloud.memcache_v1.services.cloud_memcache import CloudMemcacheClient +from google.cloud.memcache_v1.services.cloud_memcache import pagers +from google.cloud.memcache_v1.services.cloud_memcache import transports +from google.cloud.memcache_v1.types import cloud_memcache +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert CloudMemcacheClient._get_default_mtls_endpoint(None) is None + assert ( + CloudMemcacheClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + CloudMemcacheClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + CloudMemcacheClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CloudMemcacheClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CloudMemcacheClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +def test_cloud_memcache_client_from_service_account_info(): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = CloudMemcacheClient.from_service_account_info(info) + assert client.transport._credentials == creds + + assert client.transport._host == "memcache.googleapis.com:443" + + +@pytest.mark.parametrize( + "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] +) +def test_cloud_memcache_client_from_service_account_file(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + + client = 
client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + + assert client.transport._host == "memcache.googleapis.com:443" + + +def test_cloud_memcache_client_get_transport_class(): + transport = CloudMemcacheClient.get_transport_class() + available_transports = [ + transports.CloudMemcacheGrpcTransport, + ] + assert transport in available_transports + + transport = CloudMemcacheClient.get_transport_class("grpc") + assert transport == transports.CloudMemcacheGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudMemcacheClient, transports.CloudMemcacheGrpcTransport, "grpc"), + ( + CloudMemcacheAsyncClient, + transports.CloudMemcacheGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + CloudMemcacheClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudMemcacheClient), +) +@mock.patch.object( + CloudMemcacheAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudMemcacheAsyncClient), +) +def test_cloud_memcache_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(CloudMemcacheClient, "get_transport_class") as gtc: + transport = transport_class(credentials=credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(CloudMemcacheClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (CloudMemcacheClient, transports.CloudMemcacheGrpcTransport, "grpc", "true"), + ( + CloudMemcacheAsyncClient, + transports.CloudMemcacheGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (CloudMemcacheClient, transports.CloudMemcacheGrpcTransport, "grpc", "false"), + ( + CloudMemcacheAsyncClient, + transports.CloudMemcacheGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + CloudMemcacheClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudMemcacheClient), +) +@mock.patch.object( + CloudMemcacheAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudMemcacheAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_cloud_memcache_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudMemcacheClient, transports.CloudMemcacheGrpcTransport, "grpc"), + ( + CloudMemcacheAsyncClient, + transports.CloudMemcacheGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_cloud_memcache_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudMemcacheClient, transports.CloudMemcacheGrpcTransport, "grpc"), + ( + CloudMemcacheAsyncClient, + transports.CloudMemcacheGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_cloud_memcache_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_cloud_memcache_client_client_options_from_dict(): + with mock.patch( + "google.cloud.memcache_v1.services.cloud_memcache.transports.CloudMemcacheGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = CloudMemcacheClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_list_instances( + transport: str = "grpc", request_type=cloud_memcache.ListInstancesRequest +): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_memcache.ListInstancesResponse( + next_page_token="next_page_token_value", unreachable=["unreachable_value"], + ) + + response = client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.ListInstancesRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, pagers.ListInstancesPager) + + assert response.next_page_token == "next_page_token_value" + + assert response.unreachable == ["unreachable_value"] + + +def test_list_instances_from_dict(): + test_list_instances(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_instances_async( + transport: str = "grpc_asyncio", request_type=cloud_memcache.ListInstancesRequest +): + client = CloudMemcacheAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_memcache.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + + response = await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.ListInstancesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListInstancesAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_instances_async_from_dict(): + await test_list_instances_async(request_type=dict) + + +def test_list_instances_field_headers(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.ListInstancesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value = cloud_memcache.ListInstancesResponse() + + client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_instances_field_headers_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.ListInstancesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_memcache.ListInstancesResponse() + ) + + await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_instances_flattened(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_memcache.ListInstancesResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_instances(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +def test_list_instances_flattened_error(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_instances( + cloud_memcache.ListInstancesRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_instances_flattened_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cloud_memcache.ListInstancesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_memcache.ListInstancesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_instances(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_instances_flattened_error_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_instances( + cloud_memcache.ListInstancesRequest(), parent="parent_value", + ) + + +def test_list_instances_pager(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_memcache.ListInstancesResponse( + instances=[ + cloud_memcache.Instance(), + cloud_memcache.Instance(), + cloud_memcache.Instance(), + ], + next_page_token="abc", + ), + cloud_memcache.ListInstancesResponse(instances=[], next_page_token="def",), + cloud_memcache.ListInstancesResponse( + instances=[cloud_memcache.Instance(),], next_page_token="ghi", + ), + cloud_memcache.ListInstancesResponse( + instances=[cloud_memcache.Instance(), cloud_memcache.Instance(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_instances(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, cloud_memcache.Instance) for i in results) + + +def test_list_instances_pages(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_memcache.ListInstancesResponse( + instances=[ + cloud_memcache.Instance(), + cloud_memcache.Instance(), + cloud_memcache.Instance(), + ], + next_page_token="abc", + ), + cloud_memcache.ListInstancesResponse(instances=[], next_page_token="def",), + cloud_memcache.ListInstancesResponse( + instances=[cloud_memcache.Instance(),], next_page_token="ghi", + ), + cloud_memcache.ListInstancesResponse( + instances=[cloud_memcache.Instance(), cloud_memcache.Instance(),], + ), + RuntimeError, + ) + pages = list(client.list_instances(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_instances_async_pager(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_memcache.ListInstancesResponse( + instances=[ + cloud_memcache.Instance(), + cloud_memcache.Instance(), + cloud_memcache.Instance(), + ], + next_page_token="abc", + ), + cloud_memcache.ListInstancesResponse(instances=[], next_page_token="def",), + cloud_memcache.ListInstancesResponse( + instances=[cloud_memcache.Instance(),], next_page_token="ghi", + ), + cloud_memcache.ListInstancesResponse( + instances=[cloud_memcache.Instance(), cloud_memcache.Instance(),], + ), + RuntimeError, + ) + async_pager = await client.list_instances(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, cloud_memcache.Instance) for i in responses) + + +@pytest.mark.asyncio +async def test_list_instances_async_pages(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_memcache.ListInstancesResponse( + instances=[ + cloud_memcache.Instance(), + cloud_memcache.Instance(), + cloud_memcache.Instance(), + ], + next_page_token="abc", + ), + cloud_memcache.ListInstancesResponse(instances=[], next_page_token="def",), + cloud_memcache.ListInstancesResponse( + instances=[cloud_memcache.Instance(),], next_page_token="ghi", + ), + cloud_memcache.ListInstancesResponse( + instances=[cloud_memcache.Instance(), cloud_memcache.Instance(),], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_instances(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_instance( + transport: str = "grpc", request_type=cloud_memcache.GetInstanceRequest +): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_memcache.Instance( + name="name_value", + display_name="display_name_value", + authorized_network="authorized_network_value", + zones=["zones_value"], + node_count=1070, + memcache_version=cloud_memcache.MemcacheVersion.MEMCACHE_1_5, + state=cloud_memcache.Instance.State.CREATING, + memcache_full_version="memcache_full_version_value", + discovery_endpoint="discovery_endpoint_value", + ) + + response = client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.GetInstanceRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, cloud_memcache.Instance) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.authorized_network == "authorized_network_value" + + assert response.zones == ["zones_value"] + + assert response.node_count == 1070 + + assert response.memcache_version == cloud_memcache.MemcacheVersion.MEMCACHE_1_5 + + assert response.state == cloud_memcache.Instance.State.CREATING + + assert response.memcache_full_version == "memcache_full_version_value" + + assert response.discovery_endpoint == "discovery_endpoint_value" + + +def test_get_instance_from_dict(): + test_get_instance(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_memcache.GetInstanceRequest +): + client = CloudMemcacheAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_memcache.Instance( + name="name_value", + display_name="display_name_value", + authorized_network="authorized_network_value", + zones=["zones_value"], + node_count=1070, + memcache_version=cloud_memcache.MemcacheVersion.MEMCACHE_1_5, + state=cloud_memcache.Instance.State.CREATING, + memcache_full_version="memcache_full_version_value", + discovery_endpoint="discovery_endpoint_value", + ) + ) + + response = await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.GetInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_memcache.Instance) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.authorized_network == "authorized_network_value" + + assert response.zones == ["zones_value"] + + assert response.node_count == 1070 + + assert response.memcache_version == cloud_memcache.MemcacheVersion.MEMCACHE_1_5 + + assert response.state == cloud_memcache.Instance.State.CREATING + + assert response.memcache_full_version == "memcache_full_version_value" + + assert response.discovery_endpoint == "discovery_endpoint_value" + + +@pytest.mark.asyncio +async def test_get_instance_async_from_dict(): + await test_get_instance_async(request_type=dict) + + +def test_get_instance_field_headers(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.GetInstanceRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value = cloud_memcache.Instance() + + client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_instance_field_headers_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.GetInstanceRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_memcache.Instance() + ) + + await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_instance_flattened(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_memcache.Instance() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.get_instance(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_instance_flattened_error(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_instance( + cloud_memcache.GetInstanceRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_instance_flattened_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_memcache.Instance() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_memcache.Instance() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_instance(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_instance_flattened_error_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_instance( + cloud_memcache.GetInstanceRequest(), name="name_value", + ) + + +def test_create_instance( + transport: str = "grpc", request_type=cloud_memcache.CreateInstanceRequest +): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + + response = client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.CreateInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_instance_from_dict(): + test_create_instance(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_memcache.CreateInstanceRequest +): + client = CloudMemcacheAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + + response = await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.CreateInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_instance_async_from_dict(): + await test_create_instance_async(request_type=dict) + + +def test_create_instance_field_headers(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.CreateInstanceRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + + client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_instance_field_headers_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.CreateInstanceRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + + await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_instance_flattened(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_instance( + parent="parent_value", + instance=cloud_memcache.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].instance == cloud_memcache.Instance(name="name_value") + + assert args[0].instance_id == "instance_id_value" + + +def test_create_instance_flattened_error(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_instance( + cloud_memcache.CreateInstanceRequest(), + parent="parent_value", + instance=cloud_memcache.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_instance_flattened_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_instance( + parent="parent_value", + instance=cloud_memcache.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].instance == cloud_memcache.Instance(name="name_value") + + assert args[0].instance_id == "instance_id_value" + + +@pytest.mark.asyncio +async def test_create_instance_flattened_error_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_instance( + cloud_memcache.CreateInstanceRequest(), + parent="parent_value", + instance=cloud_memcache.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +def test_update_instance( + transport: str = "grpc", request_type=cloud_memcache.UpdateInstanceRequest +): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + + response = client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.UpdateInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_instance_from_dict(): + test_update_instance(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateInstanceRequest +): + client = CloudMemcacheAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + + response = await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.UpdateInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_instance_async_from_dict(): + await test_update_instance_async(request_type=dict) + + +def test_update_instance_field_headers(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.UpdateInstanceRequest() + request.instance.name = "instance.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "instance.name=instance.name/value",) in kw[ + "metadata" + ] + + +@pytest.mark.asyncio +async def test_update_instance_field_headers_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.UpdateInstanceRequest() + request.instance.name = "instance.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + + await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "instance.name=instance.name/value",) in kw[ + "metadata" + ] + + +def test_update_instance_flattened(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_instance( + instance=cloud_memcache.Instance(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].instance == cloud_memcache.Instance(name="name_value") + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +def test_update_instance_flattened_error(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_instance( + cloud_memcache.UpdateInstanceRequest(), + instance=cloud_memcache.Instance(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_instance_flattened_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_instance( + instance=cloud_memcache.Instance(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].instance == cloud_memcache.Instance(name="name_value") + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +@pytest.mark.asyncio +async def test_update_instance_flattened_error_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_instance( + cloud_memcache.UpdateInstanceRequest(), + instance=cloud_memcache.Instance(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +def test_update_parameters( + transport: str = "grpc", request_type=cloud_memcache.UpdateParametersRequest +): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_parameters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + + response = client.update_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.UpdateParametersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_parameters_from_dict(): + test_update_parameters(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_parameters_async( + transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateParametersRequest +): + client = CloudMemcacheAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_parameters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + + response = await client.update_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.UpdateParametersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_parameters_async_from_dict(): + await test_update_parameters_async(request_type=dict) + + +def test_update_parameters_field_headers(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.UpdateParametersRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_parameters), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + + client.update_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_parameters_field_headers_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = cloud_memcache.UpdateParametersRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_parameters), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + + await client.update_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_update_parameters_flattened(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_parameters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_parameters( + name="name_value", + update_mask=field_mask.FieldMask(paths=["paths_value"]), + parameters=cloud_memcache.MemcacheParameters(id="id_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + assert args[0].parameters == cloud_memcache.MemcacheParameters(id="id_value") + + +def test_update_parameters_flattened_error(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_parameters( + cloud_memcache.UpdateParametersRequest(), + name="name_value", + update_mask=field_mask.FieldMask(paths=["paths_value"]), + parameters=cloud_memcache.MemcacheParameters(id="id_value"), + ) + + +@pytest.mark.asyncio +async def test_update_parameters_flattened_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_parameters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_parameters( + name="name_value", + update_mask=field_mask.FieldMask(paths=["paths_value"]), + parameters=cloud_memcache.MemcacheParameters(id="id_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + assert args[0].parameters == cloud_memcache.MemcacheParameters(id="id_value") + + +@pytest.mark.asyncio +async def test_update_parameters_flattened_error_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_parameters( + cloud_memcache.UpdateParametersRequest(), + name="name_value", + update_mask=field_mask.FieldMask(paths=["paths_value"]), + parameters=cloud_memcache.MemcacheParameters(id="id_value"), + ) + + +def test_delete_instance( + transport: str = "grpc", request_type=cloud_memcache.DeleteInstanceRequest +): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + + response = client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.DeleteInstanceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_delete_instance_from_dict(): + test_delete_instance(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_memcache.DeleteInstanceRequest +): + client = CloudMemcacheAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + + response = await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.DeleteInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_instance_async_from_dict(): + await test_delete_instance_async(request_type=dict) + + +def test_delete_instance_field_headers(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.DeleteInstanceRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + + client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_instance_field_headers_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.DeleteInstanceRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + + await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_instance_flattened(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_instance(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_delete_instance_flattened_error(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_instance( + cloud_memcache.DeleteInstanceRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_instance_flattened_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_instance(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_instance_flattened_error_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_instance( + cloud_memcache.DeleteInstanceRequest(), name="name_value", + ) + + +def test_apply_parameters( + transport: str = "grpc", request_type=cloud_memcache.ApplyParametersRequest +): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + + response = client.apply_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.ApplyParametersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_apply_parameters_from_dict(): + test_apply_parameters(request_type=dict) + + +@pytest.mark.asyncio +async def test_apply_parameters_async( + transport: str = "grpc_asyncio", request_type=cloud_memcache.ApplyParametersRequest +): + client = CloudMemcacheAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + + response = await client.apply_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.ApplyParametersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_apply_parameters_async_from_dict(): + await test_apply_parameters_async(request_type=dict) + + +def test_apply_parameters_field_headers(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.ApplyParametersRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + + client.apply_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_apply_parameters_field_headers_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.ApplyParametersRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + + await client.apply_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_apply_parameters_flattened(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.apply_parameters( + name="name_value", node_ids=["node_ids_value"], apply_all=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].node_ids == ["node_ids_value"] + + assert args[0].apply_all == True + + +def test_apply_parameters_flattened_error(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.apply_parameters( + cloud_memcache.ApplyParametersRequest(), + name="name_value", + node_ids=["node_ids_value"], + apply_all=True, + ) + + +@pytest.mark.asyncio +async def test_apply_parameters_flattened_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.apply_parameters( + name="name_value", node_ids=["node_ids_value"], apply_all=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].node_ids == ["node_ids_value"] + + assert args[0].apply_all == True + + +@pytest.mark.asyncio +async def test_apply_parameters_flattened_error_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.apply_parameters( + cloud_memcache.ApplyParametersRequest(), + name="name_value", + node_ids=["node_ids_value"], + apply_all=True, + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.CloudMemcacheGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CloudMemcacheGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudMemcacheClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CloudMemcacheGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudMemcacheClient( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudMemcacheGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + client = CloudMemcacheClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudMemcacheGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CloudMemcacheGrpcAsyncIOTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudMemcacheGrpcTransport, + transports.CloudMemcacheGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client.transport, transports.CloudMemcacheGrpcTransport,) + + +def test_cloud_memcache_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(exceptions.DuplicateCredentialArgs): + transport = transports.CloudMemcacheTransport( + credentials=credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_cloud_memcache_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.memcache_v1.services.cloud_memcache.transports.CloudMemcacheTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.CloudMemcacheTransport( + credentials=credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "list_instances", + "get_instance", + "create_instance", + "update_instance", + "update_parameters", + "delete_instance", + "apply_parameters", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + +def test_cloud_memcache_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.cloud.memcache_v1.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.CloudMemcacheTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_cloud_memcache_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(auth, "default") as adc, mock.patch( + "google.cloud.memcache_v1.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.CloudMemcacheTransport() + adc.assert_called_once() + + +def test_cloud_memcache_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + CloudMemcacheClient() + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +def test_cloud_memcache_transport_auth_adc(): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.CloudMemcacheGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudMemcacheGrpcTransport, + transports.CloudMemcacheGrpcAsyncIOTransport, + ], +) +def test_cloud_memcache_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_cloud_memcache_host_no_port(): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="memcache.googleapis.com" + ), + ) + assert client.transport._host == "memcache.googleapis.com:443" + + +def test_cloud_memcache_host_with_port(): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="memcache.googleapis.com:8000" + ), + ) + assert client.transport._host == "memcache.googleapis.com:8000" + + +def test_cloud_memcache_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.CloudMemcacheGrpcTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_cloud_memcache_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.CloudMemcacheGrpcAsyncIOTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudMemcacheGrpcTransport, + transports.CloudMemcacheGrpcAsyncIOTransport, + ], +) +def test_cloud_memcache_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments 
(api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudMemcacheGrpcTransport, + transports.CloudMemcacheGrpcAsyncIOTransport, + ], +) +def test_cloud_memcache_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_cloud_memcache_grpc_lro_client(): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance(transport.operations_client, operations_v1.OperationsClient,) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +def test_cloud_memcache_grpc_lro_async_client(): + client = CloudMemcacheAsyncClient( + credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_instance_path(): + project = "squid" + location = "clam" + instance = "whelk" + + expected = "projects/{project}/locations/{location}/instances/{instance}".format( + project=project, location=location, instance=instance, + ) + actual = CloudMemcacheClient.instance_path(project, location, instance) + assert expected == actual + + +def test_parse_instance_path(): + expected = { + "project": "octopus", + "location": "oyster", + "instance": "nudibranch", + } + path = CloudMemcacheClient.instance_path(**expected) + + # Check that the path construction is reversible. + actual = CloudMemcacheClient.parse_instance_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = CloudMemcacheClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = CloudMemcacheClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudMemcacheClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + + expected = "folders/{folder}".format(folder=folder,) + actual = CloudMemcacheClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = CloudMemcacheClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = CloudMemcacheClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + + expected = "organizations/{organization}".format(organization=organization,) + actual = CloudMemcacheClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = CloudMemcacheClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = CloudMemcacheClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + + expected = "projects/{project}".format(project=project,) + actual = CloudMemcacheClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = CloudMemcacheClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudMemcacheClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = CloudMemcacheClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = CloudMemcacheClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = CloudMemcacheClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.CloudMemcacheTransport, "_prep_wrapped_messages" + ) as prep: + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.CloudMemcacheTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = CloudMemcacheClient.get_transport_class() + transport = transport_class( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index 3050c18..0671fea 100644 --- a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -92,8 +92,21 @@ def test__get_default_mtls_endpoint(): ) +def test_cloud_memcache_client_from_service_account_info(): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = 
CloudMemcacheClient.from_service_account_info(info) + assert client.transport._credentials == creds + + assert client.transport._host == "memcache.googleapis.com:443" + + @pytest.mark.parametrize( - "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient] + "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] ) def test_cloud_memcache_client_from_service_account_file(client_class): creds = credentials.AnonymousCredentials() @@ -112,7 +125,10 @@ def test_cloud_memcache_client_from_service_account_file(client_class): def test_cloud_memcache_client_get_transport_class(): transport = CloudMemcacheClient.get_transport_class() - assert transport == transports.CloudMemcacheGrpcTransport + available_transports = [ + transports.CloudMemcacheGrpcTransport, + ] + assert transport in available_transports transport = CloudMemcacheClient.get_transport_class("grpc") assert transport == transports.CloudMemcacheGrpcTransport @@ -163,7 +179,7 @@ def test_cloud_memcache_client_client_options( credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -179,7 +195,7 @@ def test_cloud_memcache_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -195,7 +211,7 @@ def test_cloud_memcache_client_client_options( credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -223,7 +239,7 @@ def test_cloud_memcache_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id="octopus", 
client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -274,29 +290,25 @@ def test_cloud_memcache_client_mtls_env_auto( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - ssl_channel_creds = mock.Mock() - with mock.patch( - "grpc.ssl_channel_credentials", return_value=ssl_channel_creds - ): - patched.return_value = None - client = client_class(client_options=options) + patched.return_value = None + client = client_class(client_options=options) - if use_client_cert_env == "false": - expected_ssl_channel_creds = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_ssl_channel_creds = ssl_channel_creds - expected_host = client.DEFAULT_MTLS_ENDPOINT + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
@@ -305,66 +317,53 @@ def test_cloud_memcache_client_mtls_env_auto( ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, ): with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.ssl_credentials", - new_callable=mock.PropertyMock, - ) as ssl_credentials_mock: - if use_client_cert_env == "false": - is_mtls_mock.return_value = False - ssl_credentials_mock.return_value = None - expected_host = client.DEFAULT_ENDPOINT - expected_ssl_channel_creds = None - else: - is_mtls_mock.return_value = True - ssl_credentials_mock.return_value = mock.Mock() - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_ssl_channel_creds = ( - ssl_credentials_mock.return_value - ) - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None - ): - with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - is_mtls_mock.return_value = False patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=expected_host, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -390,7 +389,7 @@ def test_cloud_memcache_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -420,7 +419,7 @@ def test_cloud_memcache_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, 
+ client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -439,7 +438,7 @@ def test_cloud_memcache_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -807,6 +806,7 @@ def test_get_instance( state=cloud_memcache.Instance.State.CREATING, memcache_full_version="memcache_full_version_value", discovery_endpoint="discovery_endpoint_value", + update_available=True, ) response = client.get_instance(request) @@ -839,6 +839,8 @@ def test_get_instance( assert response.discovery_endpoint == "discovery_endpoint_value" + assert response.update_available is True + def test_get_instance_from_dict(): test_get_instance(request_type=dict) @@ -870,6 +872,7 @@ async def test_get_instance_async( state=cloud_memcache.Instance.State.CREATING, memcache_full_version="memcache_full_version_value", discovery_endpoint="discovery_endpoint_value", + update_available=True, ) ) @@ -902,6 +905,8 @@ async def test_get_instance_async( assert response.discovery_endpoint == "discovery_endpoint_value" + assert response.update_available is True + @pytest.mark.asyncio async def test_get_instance_async_from_dict(): @@ -2048,6 +2053,223 @@ async def test_apply_parameters_flattened_error_async(): ) +def test_apply_software_update( + transport: str = "grpc", request_type=cloud_memcache.ApplySoftwareUpdateRequest +): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.apply_software_update), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + + response = client.apply_software_update(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.ApplySoftwareUpdateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_apply_software_update_from_dict(): + test_apply_software_update(request_type=dict) + + +@pytest.mark.asyncio +async def test_apply_software_update_async( + transport: str = "grpc_asyncio", + request_type=cloud_memcache.ApplySoftwareUpdateRequest, +): + client = CloudMemcacheAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_software_update), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + + response = await client.apply_software_update(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.ApplySoftwareUpdateRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_apply_software_update_async_from_dict(): + await test_apply_software_update_async(request_type=dict) + + +def test_apply_software_update_field_headers(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.ApplySoftwareUpdateRequest() + request.instance = "instance/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_software_update), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + + client.apply_software_update(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "instance=instance/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_apply_software_update_field_headers_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.ApplySoftwareUpdateRequest() + request.instance = "instance/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_software_update), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + + await client.apply_software_update(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "instance=instance/value",) in kw["metadata"] + + +def test_apply_software_update_flattened(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_software_update), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.apply_software_update( + instance="instance_value", node_ids=["node_ids_value"], apply_all=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].instance == "instance_value" + + assert args[0].node_ids == ["node_ids_value"] + + assert args[0].apply_all == True + + +def test_apply_software_update_flattened_error(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.apply_software_update( + cloud_memcache.ApplySoftwareUpdateRequest(), + instance="instance_value", + node_ids=["node_ids_value"], + apply_all=True, + ) + + +@pytest.mark.asyncio +async def test_apply_software_update_flattened_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.apply_software_update), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.apply_software_update( + instance="instance_value", node_ids=["node_ids_value"], apply_all=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].instance == "instance_value" + + assert args[0].node_ids == ["node_ids_value"] + + assert args[0].apply_all == True + + +@pytest.mark.asyncio +async def test_apply_software_update_flattened_error_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.apply_software_update( + cloud_memcache.ApplySoftwareUpdateRequest(), + instance="instance_value", + node_ids=["node_ids_value"], + apply_all=True, + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.CloudMemcacheGrpcTransport( @@ -2152,6 +2374,7 @@ def test_cloud_memcache_base_transport(): "update_parameters", "delete_instance", "apply_parameters", + "apply_software_update", ) for method in methods: with pytest.raises(NotImplementedError): @@ -2218,6 +2441,51 @@ def test_cloud_memcache_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudMemcacheGrpcTransport, + transports.CloudMemcacheGrpcAsyncIOTransport, + ], +) +def test_cloud_memcache_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + def test_cloud_memcache_host_no_port(): client = CloudMemcacheClient( credentials=credentials.AnonymousCredentials(), @@ -2239,7 +2507,7 @@ def test_cloud_memcache_host_with_port(): def test_cloud_memcache_grpc_transport_channel(): - channel = grpc.insecure_channel("http://localhost/") + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.CloudMemcacheGrpcTransport( @@ -2251,7 +2519,7 @@ def test_cloud_memcache_grpc_transport_channel(): def test_cloud_memcache_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel("http://localhost/") + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.CloudMemcacheGrpcAsyncIOTransport( @@ -2262,6 +2530,8 @@ def test_cloud_memcache_grpc_asyncio_transport_channel(): assert transport._ssl_channel_credentials == None +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.parametrize( "transport_class", [ @@ -2274,7 +2544,7 @@ def test_cloud_memcache_transport_channel_mtls_with_client_cert_source(transport "grpc.ssl_channel_credentials", autospec=True ) as grpc_ssl_channel_cred: with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -2312,6 +2582,8 @@ def test_cloud_memcache_transport_channel_mtls_with_client_cert_source(transport assert transport._ssl_channel_credentials == mock_ssl_cred +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [ @@ -2327,7 +2599,7 @@ def test_cloud_memcache_transport_channel_mtls_with_adc(transport_class): ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel From cad0602b97f09d0241ba7edab27f6f88526c22ab Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 11 Feb 2021 16:06:02 +0000 Subject: [PATCH 009/159] chore: release 0.3.0 (#28) :robot: I have created a release \*beep\* \*boop\* --- ## [0.3.0](https://www.github.com/googleapis/python-memcache/compare/v0.2.0...v0.3.0) (2021-02-10) ### Features * add async client ([#26](https://www.github.com/googleapis/python-memcache/issues/26)) ([0bbc337](https://www.github.com/googleapis/python-memcache/commit/0bbc337594e2a44c51a5b372670d72499592e2e0)) * generate v1 ([#37](https://www.github.com/googleapis/python-memcache/issues/37)) ([7945daf](https://www.github.com/googleapis/python-memcache/commit/7945dafbbee1b21efc733e079044db77e880a10a)) --- This PR 
was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). --- CHANGELOG.md | 8 ++++++++ setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e4f20fc..1c6a712 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.3.0](https://www.github.com/googleapis/python-memcache/compare/v0.2.0...v0.3.0) (2021-02-10) + + +### Features + +* add async client ([#26](https://www.github.com/googleapis/python-memcache/issues/26)) ([0bbc337](https://www.github.com/googleapis/python-memcache/commit/0bbc337594e2a44c51a5b372670d72499592e2e0)) +* generate v1 ([#37](https://www.github.com/googleapis/python-memcache/issues/37)) ([7945daf](https://www.github.com/googleapis/python-memcache/commit/7945dafbbee1b21efc733e079044db77e880a10a)) + ## [0.2.0](https://www.github.com/googleapis/python-memcache/compare/v0.1.0...v0.2.0) (2020-05-28) diff --git a/setup.py b/setup.py index bae28ee..70ec372 100644 --- a/setup.py +++ b/setup.py @@ -19,7 +19,7 @@ import os import setuptools # type: ignore -version = "0.2.0" +version = "0.3.0" package_root = os.path.abspath(os.path.dirname(__file__)) From f05345a9488cb73213ef40adf605f58878e95a0c Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 1 Apr 2021 07:56:17 -0700 Subject: [PATCH 010/159] test: use constraints files to test lower bounds (#41) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. 
* chore(python): fix column sizing issue in docs Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Thu Jan 7 11:58:32 2021 -0700 Source-Repo: googleapis/synthtool Source-Sha: f15b57ccfd71106c2299e9b89835fe6e55015662 Source-Link: https://github.com/googleapis/synthtool/commit/f15b57ccfd71106c2299e9b89835fe6e55015662 * chore(python): use 'http' in LICENSE Co-authored-by: Tim Swast Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Thu Jan 7 13:05:12 2021 -0700 Source-Repo: googleapis/synthtool Source-Sha: 41a4e56982620d3edcf110d76f4fcdfdec471ac8 Source-Link: https://github.com/googleapis/synthtool/commit/41a4e56982620d3edcf110d76f4fcdfdec471ac8 * chore(python): skip docfx in main presubmit * chore(python): skip docfx in main presubmit * chore: add missing quotation mark Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Mon Jan 11 09:43:06 2021 -0700 Source-Repo: googleapis/synthtool Source-Sha: 16ec872dd898d7de6e1822badfac32484b5d9031 Source-Link: https://github.com/googleapis/synthtool/commit/16ec872dd898d7de6e1822badfac32484b5d9031 * build(python): make `NOX_SESSION` optional I added this accidentally in #889. `NOX_SESSION` should be passed down if it is set but not marked required. Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Tue Jan 19 09:38:04 2021 -0700 Source-Repo: googleapis/synthtool Source-Sha: ba960d730416fe05c50547e975ce79fcee52c671 Source-Link: https://github.com/googleapis/synthtool/commit/ba960d730416fe05c50547e975ce79fcee52c671 * chore: add noxfile parameters for extra dependencies Also, add tests for some noxfile parameters for assurance that the template generates valid Python. 
Co-authored-by: Jeffrey Rennie Source-Author: Tim Swast Source-Date: Tue Jan 26 12:26:57 2021 -0600 Source-Repo: googleapis/synthtool Source-Sha: 778d8beae28d6d87eb01fdc839a4b4d966ed2ebe Source-Link: https://github.com/googleapis/synthtool/commit/778d8beae28d6d87eb01fdc839a4b4d966ed2ebe * build: migrate to flakybot Source-Author: Justin Beckwith Source-Date: Thu Jan 28 22:22:38 2021 -0800 Source-Repo: googleapis/synthtool Source-Sha: d1bb9173100f62c0cfc8f3138b62241e7f47ca6a Source-Link: https://github.com/googleapis/synthtool/commit/d1bb9173100f62c0cfc8f3138b62241e7f47ca6a * chore(python): include py.typed files in release A py.typed file must be included in the released package for it to be considered typed by type checkers. https://www.python.org/dev/peps/pep-0561/#packaging-type-information. See https://github.com/googleapis/python-secret-manager/issues/79 Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Fri Feb 5 17:32:06 2021 -0700 Source-Repo: googleapis/synthtool Source-Sha: 33366574ffb9e11737b3547eb6f020ecae0536e8 Source-Link: https://github.com/googleapis/synthtool/commit/33366574ffb9e11737b3547eb6f020ecae0536e8 * docs: update python contributing guide Adds details about blacken, updates version for system tests, and shows how to pass through pytest arguments. 
Source-Author: Chris Cotter Source-Date: Mon Feb 8 17:13:36 2021 -0500 Source-Repo: googleapis/synthtool Source-Sha: 4679e7e415221f03ff2a71e3ffad75b9ec41d87e Source-Link: https://github.com/googleapis/synthtool/commit/4679e7e415221f03ff2a71e3ffad75b9ec41d87e * build(python): enable flakybot on library unit and system tests Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Wed Feb 17 14:10:46 2021 -0700 Source-Repo: googleapis/synthtool Source-Sha: d17674372e27fb8f23013935e794aa37502071aa Source-Link: https://github.com/googleapis/synthtool/commit/d17674372e27fb8f23013935e794aa37502071aa * test: install pyopenssl for mtls testing Source-Author: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Source-Date: Tue Mar 2 12:27:56 2021 -0800 Source-Repo: googleapis/synthtool Source-Sha: 0780323da96d5a53925fe0547757181fe76e8f1e Source-Link: https://github.com/googleapis/synthtool/commit/0780323da96d5a53925fe0547757181fe76e8f1e * chore: add pre-commit-config to renovate ignore paths Disable renovate PRs on the .pre-commit-config.yaml which is templated from synthtool. https://docs.renovatebot.com/configuration-options/#ignorepaths Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Mon Mar 15 09:05:39 2021 -0600 Source-Repo: googleapis/synthtool Source-Sha: 2c54c473779ea731128cea61a3a6c975a08a5378 Source-Link: https://github.com/googleapis/synthtool/commit/2c54c473779ea731128cea61a3a6c975a08a5378 * build(python): fail nox sessions if a python version is missing Nox's default behavior is to quietly skip if a python interpreter is missing. 
https://nox.thea.codes/en/stable/usage.html#failing-sessions-when-the-interpreter-is-missing Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Tue Mar 16 13:38:02 2021 -0600 Source-Repo: googleapis/synthtool Source-Sha: eda422b90c3dde4a872a13e6b78a8f802c40d0db Source-Link: https://github.com/googleapis/synthtool/commit/eda422b90c3dde4a872a13e6b78a8f802c40d0db * chore(python): add kokoro configs for periodic builds against head This change should be non-destructive. Note for library repo maintainers: After applying this change, you can easily add (or change) periodic builds against head by adding config files in google3. See python-pubsub repo for example. Source-Author: Takashi Matsuo Source-Date: Fri Mar 19 11:17:59 2021 -0700 Source-Repo: googleapis/synthtool Source-Sha: 79c8dd7ee768292f933012d3a69a5b4676404cda Source-Link: https://github.com/googleapis/synthtool/commit/79c8dd7ee768292f933012d3a69a5b4676404cda * chore(deps): update precommit hook pycqa/flake8 to v3.9.0 [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [pycqa/flake8](https://gitlab.com/pycqa/flake8) | repository | minor | `3.8.4` -> `3.9.0` | --- ### Release Notes
pycqa/flake8 ### [`v3.9.0`](https://gitlab.com/pycqa/flake8/compare/3.8.4...3.9.0) [Compare Source](https://gitlab.com/pycqa/flake8/compare/3.8.4...3.9.0)
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/synthtool). Source-Author: WhiteSource Renovate Source-Date: Tue Mar 23 17:38:03 2021 +0100 Source-Repo: googleapis/synthtool Source-Sha: f5c5904fb0c6aa3b3730eadf4e5a4485afc65726 Source-Link: https://github.com/googleapis/synthtool/commit/f5c5904fb0c6aa3b3730eadf4e5a4485afc65726 * test(python): use constraints files to check dependency lower bounds Use a constraints file when installing dependencies for system and unit tests nox sessions. https://pip.pypa.io/en/stable/user_guide/#constraints-files > Constraints files are requirements files that **only control which version of a requirement is installed, not whether it is installed or not**. Their syntax and contents is nearly identical to Requirements Files. There is one key difference: Including a package in a constraints file does not trigger installation of the package. ``` testing ├── constraints-3.10.txt ├── constraints-3.11.txt ├── constraints-3.6.txt ├── constraints-3.7.txt ├── constraints-3.8.txt └── constraints-3.9.txt ``` Going forward, one constraints file (currently 3.6) will be populated with every library requirement and extra listed in the `setup.py`. The constraints file will pin each requirement to the lower bound. This ensures that library maintainers will see test failures if they forget to update a lower bound on a dependency. 
See https://github.com/googleapis/python-bigquery/pull/263 for an example Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Tue Mar 23 10:52:02 2021 -0600 Source-Repo: googleapis/synthtool Source-Sha: 86ed43d4f56e6404d068e62e497029018879c771 Source-Link: https://github.com/googleapis/synthtool/commit/86ed43d4f56e6404d068e62e497029018879c771 * chore: add constraints files * chore: fix docstring * chore: exclude readme from gen Co-authored-by: Bu Sun Kim --- .gitignore | 4 +- .kokoro/build.sh | 10 ++ .kokoro/samples/python3.6/periodic-head.cfg | 11 ++ .kokoro/samples/python3.7/periodic-head.cfg | 11 ++ .kokoro/samples/python3.8/periodic-head.cfg | 11 ++ .kokoro/test-samples-against-head.sh | 28 ++++ .kokoro/test-samples-impl.sh | 102 +++++++++++++ .kokoro/test-samples.sh | 96 ++---------- .pre-commit-config.yaml | 2 +- LICENSE | 7 +- .../services/cloud_memcache/async_client.py | 35 ++++- .../services/cloud_memcache/client.py | 3 +- .../cloud_memcache/transports/base.py | 18 +-- .../cloud_memcache/transports/grpc.py | 103 +++++-------- .../cloud_memcache/transports/grpc_asyncio.py | 111 ++++++-------- google/cloud/memcache_v1/types/__init__.py | 24 +-- .../cloud/memcache_v1/types/cloud_memcache.py | 2 +- .../services/cloud_memcache/async_client.py | 33 +++- .../services/cloud_memcache/client.py | 1 - .../cloud_memcache/transports/base.py | 18 +-- .../cloud_memcache/transports/grpc.py | 103 +++++-------- .../cloud_memcache/transports/grpc_asyncio.py | 111 ++++++-------- .../cloud/memcache_v1beta2/types/__init__.py | 32 ++-- noxfile.py | 49 ++++-- renovate.json | 3 +- setup.py | 2 - synth.metadata | 23 ++- synth.py | 7 +- testing/constraints-3.6.txt | 9 ++ testing/constraints-3.7.txt | 2 + testing/constraints-3.8.txt | 2 + testing/constraints-3.9.txt | 2 + tests/unit/gapic/memcache_v1/__init__.py | 15 ++ .../gapic/memcache_v1/test_cloud_memcache.py | 124 ++++++++++++++- tests/unit/gapic/memcache_v1beta2/__init__.py | 15 ++ 
.../memcache_v1beta2/test_cloud_memcache.py | 142 +++++++++++++++++- 36 files changed, 832 insertions(+), 439 deletions(-) create mode 100644 .kokoro/samples/python3.6/periodic-head.cfg create mode 100644 .kokoro/samples/python3.7/periodic-head.cfg create mode 100644 .kokoro/samples/python3.8/periodic-head.cfg create mode 100755 .kokoro/test-samples-against-head.sh create mode 100755 .kokoro/test-samples-impl.sh create mode 100644 testing/constraints-3.6.txt create mode 100644 testing/constraints-3.7.txt create mode 100644 testing/constraints-3.8.txt create mode 100644 testing/constraints-3.9.txt diff --git a/.gitignore b/.gitignore index b9daa52..b4243ce 100644 --- a/.gitignore +++ b/.gitignore @@ -50,8 +50,10 @@ docs.metadata # Virtual environment env/ + +# Test logs coverage.xml -sponge_log.xml +*sponge_log.xml # System test environment variables. system_tests/local_test_setup diff --git a/.kokoro/build.sh b/.kokoro/build.sh index 7640bf3..03a459c 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -40,6 +40,16 @@ python3 -m pip uninstall --yes --quiet nox-automation python3 -m pip install --upgrade --quiet nox python3 -m nox --version +# If this is a continuous build, send the test log to the FlakyBot. +# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. +if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then + cleanup() { + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + } + trap cleanup EXIT HUP +fi + # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. 
if [[ -n "${NOX_SESSION:-}" ]]; then diff --git a/.kokoro/samples/python3.6/periodic-head.cfg b/.kokoro/samples/python3.6/periodic-head.cfg new file mode 100644 index 0000000..f9cfcd3 --- /dev/null +++ b/.kokoro/samples/python3.6/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.7/periodic-head.cfg b/.kokoro/samples/python3.7/periodic-head.cfg new file mode 100644 index 0000000..f9cfcd3 --- /dev/null +++ b/.kokoro/samples/python3.7/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.8/periodic-head.cfg b/.kokoro/samples/python3.8/periodic-head.cfg new file mode 100644 index 0000000..f9cfcd3 --- /dev/null +++ b/.kokoro/samples/python3.8/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh new file mode 100755 index 0000000..113d9c2 --- /dev/null +++ b/.kokoro/test-samples-against-head.sh @@ -0,0 +1,28 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A customized test runner for samples.
+#
+# For periodic builds, you can specify this file for testing against head.
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to the rightmost command to exit with a non-zero
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+cd github/python-memcache
+
+exec .kokoro/test-samples-impl.sh
diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh
new file mode 100755
index 0000000..cf5de74
--- /dev/null
+++ b/.kokoro/test-samples-impl.sh
@@ -0,0 +1,102 @@
+#!/bin/bash
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to the rightmost command to exit with a non-zero
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+# Exit early if samples directory doesn't exist
+if [ ! -d "./samples" ]; then
+  echo "No tests run.
`./samples` not found"
+  exit 0
+fi
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Debug: show build environment
+env | grep KOKORO
+
+# Install nox
+python3.6 -m pip install --upgrade --quiet nox
+
+# Use secrets accessor service account to get secrets
+if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
+    gcloud auth activate-service-account \
+	    --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
+	    --project="cloud-devrel-kokoro-resources"
+fi
+
+# This script will create 3 files:
+# - testing/test-env.sh
+# - testing/service-account.json
+# - testing/client-secrets.json
+./scripts/decrypt-secrets.sh
+
+source ./testing/test-env.sh
+export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
+
+# For cloud-run session, we activate the service account for gcloud sdk.
+gcloud auth activate-service-account \
+    --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
+
+export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
+
+echo -e "\n******************** TESTING PROJECTS ********************"
+
+# Switch to 'fail at end' to allow all tests to complete before exiting.
+set +e
+# Use RTN to return a non-zero value if the test fails.
+RTN=0
+ROOT=$(pwd)
+# Find all requirements.txt in the samples directory (may break on whitespace).
+for file in samples/**/requirements.txt; do
+    cd "$ROOT"
+    # Navigate to the project folder.
+    file=$(dirname "$file")
+    cd "$file"
+
+    echo "------------------------------------------------------------"
+    echo "- testing $file"
+    echo "------------------------------------------------------------"
+
+    # Use nox to execute the tests for the project.
+    python3.6 -m nox -s "$RUN_TESTS_SESSION"
+    EXIT=$?
+
+    # If this is a periodic build, send the test log to the FlakyBot.
+    # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
+    if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+      chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+      $KOKORO_GFILE_DIR/linux_amd64/flakybot
+    fi
+
+    if [[ $EXIT -ne 0 ]]; then
+      RTN=1
+      echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
+    else
+      echo -e "\n Testing completed.\n"
+    fi
+
+done
+cd "$ROOT"
+
+# Workaround for Kokoro permissions issue: delete secrets
+rm testing/{test-env.sh,client-secrets.json,service-account.json}
+
+exit "$RTN"
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
index 87c7159..e1c8a45 100755
--- a/.kokoro/test-samples.sh
+++ b/.kokoro/test-samples.sh
@@ -13,6 +13,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+# The default test runner for samples.
+#
+# For periodic builds, we rewind the repo to the latest release, and
+# run test-samples-impl.sh
 
 # `-e` enables the script to automatically fail when a command fails
 # `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero
@@ -24,87 +28,19 @@ cd github/python-memcache
 
 # Run periodic samples tests at latest release
 if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+    # preserving the test runner implementation.
+    cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh"
+    echo "--- IMPORTANT IMPORTANT IMPORTANT ---"
+    echo "Now we rewind the repo back to the latest release..."
     LATEST_RELEASE=$(git describe --abbrev=0 --tags)
     git checkout $LATEST_RELEASE
-fi
-
-# Exit early if samples directory doesn't exist
-if [ ! -d "./samples" ]; then
-  echo "No tests run. `./samples` not found"
-  exit 0
-fi
-
-# Disable buffering, so that the logs stream through.
-export PYTHONUNBUFFERED=1 - -# Debug: show build environment -env | grep KOKORO - -# Install nox -python3.6 -m pip install --upgrade --quiet nox - -# Use secrets acessor service account to get secrets -if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then - gcloud auth activate-service-account \ - --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ - --project="cloud-devrel-kokoro-resources" -fi - -# This script will create 3 files: -# - testing/test-env.sh -# - testing/service-account.json -# - testing/client-secrets.json -./scripts/decrypt-secrets.sh - -source ./testing/test-env.sh -export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json - -# For cloud-run session, we activate the service account for gcloud sdk. -gcloud auth activate-service-account \ - --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" - -export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json - -echo -e "\n******************** TESTING PROJECTS ********************" - -# Switch to 'fail at end' to allow all tests to complete before exiting. -set +e -# Use RTN to return a non-zero value if the test fails. -RTN=0 -ROOT=$(pwd) -# Find all requirements.txt in the samples directory (may break on whitespace). -for file in samples/**/requirements.txt; do - cd "$ROOT" - # Navigate to the project folder. - file=$(dirname "$file") - cd "$file" - - echo "------------------------------------------------------------" - echo "- testing $file" - echo "------------------------------------------------------------" - - # Use nox to execute the tests for the project. - python3.6 -m nox -s "$RUN_TESTS_SESSION" - EXIT=$? - - # If this is a periodic build, send the test log to the FlakyBot. - # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. 
- if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot - $KOKORO_GFILE_DIR/linux_amd64/flakybot + echo "The current head is: " + echo $(git rev-parse --verify HEAD) + echo "--- IMPORTANT IMPORTANT IMPORTANT ---" + # move back the test runner implementation if there's no file. + if [ ! -f .kokoro/test-samples-impl.sh ]; then + cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh fi +fi - if [[ $EXIT -ne 0 ]]; then - RTN=1 - echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" - else - echo -e "\n Testing completed.\n" - fi - -done -cd "$ROOT" - -# Workaround for Kokoro permissions issue: delete secrets -rm testing/{test-env.sh,client-secrets.json,service-account.json} - -exit "$RTN" +exec .kokoro/test-samples-impl.sh diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a9024b1..32302e4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -12,6 +12,6 @@ repos: hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 - rev: 3.8.4 + rev: 3.9.0 hooks: - id: flake8 diff --git a/LICENSE b/LICENSE index a8ee855..d645695 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,7 @@ - Apache License + + Apache License Version 2.0, January 2004 - https://www.apache.org/licenses/ + http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION @@ -192,7 +193,7 @@ you may not use this file except in compliance with the License. 
You may obtain a copy of the License at - https://www.apache.org/licenses/LICENSE-2.0 + http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, diff --git a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py index fd21c36..b09fddb 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py @@ -100,8 +100,36 @@ class CloudMemcacheAsyncClient: CloudMemcacheClient.parse_common_location_path ) - from_service_account_info = CloudMemcacheClient.from_service_account_info - from_service_account_file = CloudMemcacheClient.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudMemcacheAsyncClient: The constructed client. + """ + return CloudMemcacheClient.from_service_account_info.__func__(CloudMemcacheAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudMemcacheAsyncClient: The constructed client. 
+ """ + return CloudMemcacheClient.from_service_account_file.__func__(CloudMemcacheAsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property @@ -362,7 +390,7 @@ async def create_instance( - Must start with a letter. - Must be between 1-40 characters. - Must end with a number or a letter. - - Must be unique within the user project / location. + - Must be unique within the user project / location If any of the above are not met, will raise an invalid argument error. @@ -464,7 +492,6 @@ async def update_instance( - ``displayName`` - This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py index 0ae3f3e..591ee49 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -564,7 +564,7 @@ def create_instance( - Must start with a letter. - Must be between 1-40 characters. - Must end with a number or a letter. - - Must be unique within the user project / location. + - Must be unique within the user project / location If any of the above are not met, will raise an invalid argument error. @@ -667,7 +667,6 @@ def update_instance( - ``displayName`` - This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py index a6a2b3c..c9b57c7 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py @@ -69,10 +69,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. 
quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -80,6 +80,9 @@ def __init__( host += ":443" self._host = host + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: @@ -89,20 +92,17 @@ def __init__( if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + scopes=self._scopes, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py index bc03a88..61c7dde 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py @@ -126,7 +126,10 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -134,70 +137,50 @@ def __init__( warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. 
- self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. 
+ if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, + scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -205,18 +188,8 @@ def __init__( ], ) - self._stubs = {} # type: Dict[str, Callable] - self._operations_client = None - - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @classmethod def create_channel( @@ -230,7 +203,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py index c11f7c5..d669536 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py @@ -81,7 +81,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. 
These credentials identify this application to the service. If @@ -159,10 +159,10 @@ def __init__( ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -171,7 +171,10 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -179,70 +182,50 @@ def __init__( warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. 
+ if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, + scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -250,18 +233,8 @@ def __init__( ], ) - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) - - self._stubs = {} - self._operations_client = None + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: diff --git a/google/cloud/memcache_v1/types/__init__.py b/google/cloud/memcache_v1/types/__init__.py index 5fe285a..29acce6 100644 --- a/google/cloud/memcache_v1/types/__init__.py +++ b/google/cloud/memcache_v1/types/__init__.py @@ -16,31 +16,31 @@ # from .cloud_memcache import ( + ApplyParametersRequest, + CreateInstanceRequest, + DeleteInstanceRequest, + GetInstanceRequest, Instance, ListInstancesRequest, ListInstancesResponse, - GetInstanceRequest, - CreateInstanceRequest, - UpdateInstanceRequest, - DeleteInstanceRequest, - ApplyParametersRequest, - UpdateParametersRequest, MemcacheParameters, OperationMetadata, + UpdateInstanceRequest, + UpdateParametersRequest, MemcacheVersion, ) __all__ = ( + "ApplyParametersRequest", + "CreateInstanceRequest", + "DeleteInstanceRequest", + "GetInstanceRequest", "Instance", "ListInstancesRequest", "ListInstancesResponse", - "GetInstanceRequest", - "CreateInstanceRequest", - "UpdateInstanceRequest", - "DeleteInstanceRequest", - "ApplyParametersRequest", - "UpdateParametersRequest", "MemcacheParameters", "OperationMetadata", + "UpdateInstanceRequest", + 
"UpdateParametersRequest", "MemcacheVersion", ) diff --git a/google/cloud/memcache_v1/types/cloud_memcache.py b/google/cloud/memcache_v1/types/cloud_memcache.py index 066bc9d..a148657 100644 --- a/google/cloud/memcache_v1/types/cloud_memcache.py +++ b/google/cloud/memcache_v1/types/cloud_memcache.py @@ -352,7 +352,7 @@ class CreateInstanceRequest(proto.Message): - Must start with a letter. - Must be between 1-40 characters. - Must end with a number or a letter. - - Must be unique within the user project / location. + - Must be unique within the user project / location If any of the above are not met, will raise an invalid argument error. diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py index d0d7b85..b687488 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py @@ -100,8 +100,36 @@ class CloudMemcacheAsyncClient: CloudMemcacheClient.parse_common_location_path ) - from_service_account_info = CloudMemcacheClient.from_service_account_info - from_service_account_file = CloudMemcacheClient.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudMemcacheAsyncClient: The constructed client. + """ + return CloudMemcacheClient.from_service_account_info.__func__(CloudMemcacheAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. 
+ + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudMemcacheAsyncClient: The constructed client. + """ + return CloudMemcacheClient.from_service_account_file.__func__(CloudMemcacheAsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property @@ -458,7 +486,6 @@ async def update_instance( - ``displayName`` - This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index 0da8b35..8d43719 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -661,7 +661,6 @@ def update_instance( - ``displayName`` - This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py index 2c598e4..a1b9e2c 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py @@ -69,10 +69,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -80,6 +80,9 @@ def __init__( host += ":443" self._host = host + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: @@ -89,20 +92,17 @@ def __init__( if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + scopes=self._scopes, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py index d1ffc02..f5f202f 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py @@ -126,7 +126,10 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. 
""" + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -134,70 +137,50 @@ def __init__( warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, + scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -205,18 +188,8 @@ def __init__( ], ) - self._stubs = {} # type: Dict[str, Callable] - self._operations_client = None - - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @classmethod def create_channel( @@ -230,7 +203,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py index 24f0f89..f2452b4 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py @@ -81,7 +81,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -159,10 +159,10 @@ def __init__( ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. 
Raises: @@ -171,7 +171,10 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -179,70 +182,50 @@ def __init__( warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, + scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -250,18 +233,8 @@ def __init__( ], ) - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) - - self._stubs = {} - self._operations_client = None + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: diff --git a/google/cloud/memcache_v1beta2/types/__init__.py b/google/cloud/memcache_v1beta2/types/__init__.py index 9fa6727..90cf3eb 100644 --- a/google/cloud/memcache_v1beta2/types/__init__.py +++ b/google/cloud/memcache_v1beta2/types/__init__.py @@ -16,37 +16,37 @@ # from .cloud_memcache import ( + ApplyParametersRequest, + ApplySoftwareUpdateRequest, + CreateInstanceRequest, + DeleteInstanceRequest, + GetInstanceRequest, Instance, ListInstancesRequest, ListInstancesResponse, - GetInstanceRequest, - CreateInstanceRequest, - UpdateInstanceRequest, - DeleteInstanceRequest, - ApplyParametersRequest, - UpdateParametersRequest, - ApplySoftwareUpdateRequest, + LocationMetadata, MemcacheParameters, OperationMetadata, - LocationMetadata, + UpdateInstanceRequest, + UpdateParametersRequest, ZoneMetadata, MemcacheVersion, ) __all__ = ( + "ApplyParametersRequest", + "ApplySoftwareUpdateRequest", + "CreateInstanceRequest", + "DeleteInstanceRequest", + "GetInstanceRequest", "Instance", "ListInstancesRequest", "ListInstancesResponse", - "GetInstanceRequest", - "CreateInstanceRequest", - "UpdateInstanceRequest", - "DeleteInstanceRequest", - "ApplyParametersRequest", - "UpdateParametersRequest", - "ApplySoftwareUpdateRequest", + "LocationMetadata", "MemcacheParameters", "OperationMetadata", - "LocationMetadata", + "UpdateInstanceRequest", + "UpdateParametersRequest", "ZoneMetadata", "MemcacheVersion", ) diff --git a/noxfile.py b/noxfile.py index 9e90799..0eb433e 100644 --- a/noxfile.py +++ b/noxfile.py @@ -18,6 +18,7 @@ from __future__ import absolute_import import os +import pathlib import shutil import nox @@ -30,6 +31,8 @@ SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + # 'docfx' is excluded since it only needs to run in 
'docs-presubmit' nox.options.sessions = [ "unit", @@ -41,6 +44,9 @@ "docs", ] +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + @nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): @@ -81,18 +87,21 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. - session.install("asyncmock", "pytest-asyncio") - session.install( - "mock", "pytest", "pytest-cov", + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) + session.install("asyncmock", "pytest-asyncio", "-c", constraints_path) - session.install("-e", ".") + session.install("mock", "pytest", "pytest-cov", "-c", constraints_path) + + session.install("-e", ".", "-c", constraints_path) # Run py.test against the unit tests. session.run( "py.test", "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", "--cov=google/cloud", "--cov=tests/unit", "--cov-append", @@ -113,6 +122,9 @@ def unit(session): @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") @@ -122,6 +134,9 @@ def system(session): # Sanity check: Only run tests if the environment variable is set. if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): session.skip("Credentials must be set via environment variable") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") system_test_exists = os.path.exists(system_test_path) system_test_folder_exists = os.path.exists(system_test_folder_path) @@ -134,16 +149,26 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. 
- session.install( - "mock", "pytest", "google-cloud-testutils", - ) - session.install("-e", ".") + session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path) + session.install("-e", ".", "-c", constraints_path) # Run py.test against the system tests. if system_test_exists: - session.run("py.test", "--quiet", system_test_path, *session.posargs) + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) if system_test_folder_exists: - session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -186,9 +211,7 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") - # sphinx-docfx-yaml supports up to sphinx version 1.5.5. - # https://github.com/docascode/sphinx-docfx-yaml/issues/97 - session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml") + session.install("sphinx", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/renovate.json b/renovate.json index 4fa9493..f08bc22 100644 --- a/renovate.json +++ b/renovate.json @@ -1,5 +1,6 @@ { "extends": [ "config:base", ":preserveSemverRanges" - ] + ], + "ignorePaths": [".pre-commit-config.yaml"] } diff --git a/setup.py b/setup.py index 70ec372..94938bf 100644 --- a/setup.py +++ b/setup.py @@ -44,8 +44,6 @@ "proto-plus >= 1.4.0", ), python_requires=">=3.6", - setup_requires=["libcst >= 0.2.5"], - scripts=["scripts/fixup_keywords.py"], classifiers=[ "Development Status :: 4 - Beta", "Intended Audience :: Developers", diff --git a/synth.metadata b/synth.metadata index b83959d..df869eb 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,21 +4,29 @@ 
"git": { "name": ".", "remote": "git@github.com:googleapis/python-memcache", - "sha": "b068bfca843c0d792bb2b79f5b6b28fcc80ae7c8" + "sha": "0f8357803b2ae2e06115d36ef4e84967a8ba1256" + } + }, + { + "git": { + "name": "googleapis", + "remote": "https://github.com/googleapis/googleapis.git", + "sha": "95dd24960cf9f794ef583e59ad9f1fabe1c4a924", + "internalRef": "365882072" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "6b026e1443948dcfc0b9e3289c85e940eb70f694" + "sha": "551dd78ca04f7989abc9e63e392f8b8cfa1a0ef9" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "6b026e1443948dcfc0b9e3289c85e940eb70f694" + "sha": "551dd78ca04f7989abc9e63e392f8b8cfa1a0ef9" } } ], @@ -31,6 +39,15 @@ "language": "python", "generator": "bazel" } + }, + { + "client": { + "source": "googleapis", + "apiName": "memcache", + "apiVersion": "v1", + "language": "python", + "generator": "bazel" + } } ] } \ No newline at end of file diff --git a/synth.py b/synth.py index 65a7ca3..b097003 100644 --- a/synth.py +++ b/synth.py @@ -38,15 +38,10 @@ excludes = [ "setup.py", "docs/index.rst", + "README.rst" ] s.move(library, excludes=excludes) -# Add extra linebreak after bulleted list to appease sphinx -s.replace("google/**/*client.py", """- Must be unique within the user project / location""", -"""- Must be unique within the user project / location\n""") -s.replace("google/**/*client.py", "- ``displayName``", "- ``displayName``\n") - - # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt new file mode 100644 index 0000000..a929701 --- /dev/null +++ b/testing/constraints-3.6.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library 
dependencies and extras in this file. +# Pin the version to the lower bound. + +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.22.2 +proto-plus==1.15.0 diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt new file mode 100644 index 0000000..da93009 --- /dev/null +++ b/testing/constraints-3.7.txt @@ -0,0 +1,2 @@ +# This constraints file is left inentionally empty +# so the latest version of dependencies is installed \ No newline at end of file diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt new file mode 100644 index 0000000..da93009 --- /dev/null +++ b/testing/constraints-3.8.txt @@ -0,0 +1,2 @@ +# This constraints file is left inentionally empty +# so the latest version of dependencies is installed \ No newline at end of file diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt new file mode 100644 index 0000000..da93009 --- /dev/null +++ b/testing/constraints-3.9.txt @@ -0,0 +1,2 @@ +# This constraints file is left inentionally empty +# so the latest version of dependencies is installed \ No newline at end of file diff --git a/tests/unit/gapic/memcache_v1/__init__.py b/tests/unit/gapic/memcache_v1/__init__.py index 8b13789..42ffdf2 100644 --- a/tests/unit/gapic/memcache_v1/__init__.py +++ b/tests/unit/gapic/memcache_v1/__init__.py @@ -1 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py index 896e3b1..b4793ec 100644 --- a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -90,15 +90,19 @@ def test__get_default_mtls_endpoint(): ) -def test_cloud_memcache_client_from_service_account_info(): +@pytest.mark.parametrize( + "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] +) +def test_cloud_memcache_client_from_service_account_info(client_class): creds = credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = CloudMemcacheClient.from_service_account_info(info) + client = client_class.from_service_account_info(info) assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "memcache.googleapis.com:443" @@ -114,9 +118,11 @@ def test_cloud_memcache_client_from_service_account_file(client_class): factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "memcache.googleapis.com:443" @@ -481,6 +487,22 @@ def test_list_instances_from_dict(): test_list_instances(request_type=dict) +def test_list_instances_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + client.list_instances() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.ListInstancesRequest() + + @pytest.mark.asyncio async def test_list_instances_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.ListInstancesRequest @@ -841,6 +863,22 @@ def test_get_instance_from_dict(): test_get_instance(request_type=dict) +def test_get_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + client.get_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.GetInstanceRequest() + + @pytest.mark.asyncio async def test_get_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.GetInstanceRequest @@ -1055,6 +1093,22 @@ def test_create_instance_from_dict(): test_create_instance(request_type=dict) +def test_create_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + client.create_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.CreateInstanceRequest() + + @pytest.mark.asyncio async def test_create_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.CreateInstanceRequest @@ -1263,6 +1317,22 @@ def test_update_instance_from_dict(): test_update_instance(request_type=dict) +def test_update_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + client.update_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.UpdateInstanceRequest() + + @pytest.mark.asyncio async def test_update_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateInstanceRequest @@ -1469,6 +1539,24 @@ def test_update_parameters_from_dict(): test_update_parameters(request_type=dict) +def test_update_parameters_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_parameters), "__call__" + ) as call: + client.update_parameters() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.UpdateParametersRequest() + + @pytest.mark.asyncio async def test_update_parameters_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateParametersRequest @@ -1687,6 +1775,22 @@ def test_delete_instance_from_dict(): test_delete_instance(request_type=dict) +def test_delete_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + client.delete_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.DeleteInstanceRequest() + + @pytest.mark.asyncio async def test_delete_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.DeleteInstanceRequest @@ -1873,6 +1977,22 @@ def test_apply_parameters_from_dict(): test_apply_parameters(request_type=dict) +def test_apply_parameters_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: + client.apply_parameters() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.ApplyParametersRequest() + + @pytest.mark.asyncio async def test_apply_parameters_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.ApplyParametersRequest diff --git a/tests/unit/gapic/memcache_v1beta2/__init__.py b/tests/unit/gapic/memcache_v1beta2/__init__.py index 8b13789..42ffdf2 100644 --- a/tests/unit/gapic/memcache_v1beta2/__init__.py +++ b/tests/unit/gapic/memcache_v1beta2/__init__.py @@ -1 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index 0671fea..1e242ab 100644 --- a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -92,15 +92,19 @@ def test__get_default_mtls_endpoint(): ) -def test_cloud_memcache_client_from_service_account_info(): +@pytest.mark.parametrize( + "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] +) +def test_cloud_memcache_client_from_service_account_info(client_class): creds = credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = CloudMemcacheClient.from_service_account_info(info) + client = client_class.from_service_account_info(info) assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "memcache.googleapis.com:443" @@ -116,9 +120,11 @@ def test_cloud_memcache_client_from_service_account_file(client_class): factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "memcache.googleapis.com:443" @@ -483,6 +489,22 @@ def test_list_instances_from_dict(): test_list_instances(request_type=dict) +def test_list_instances_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + client.list_instances() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.ListInstancesRequest() + + @pytest.mark.asyncio async def test_list_instances_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.ListInstancesRequest @@ -846,6 +868,22 @@ def test_get_instance_from_dict(): test_get_instance(request_type=dict) +def test_get_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + client.get_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.GetInstanceRequest() + + @pytest.mark.asyncio async def test_get_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.GetInstanceRequest @@ -1063,6 +1101,22 @@ def test_create_instance_from_dict(): test_create_instance(request_type=dict) +def test_create_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + client.create_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.CreateInstanceRequest() + + @pytest.mark.asyncio async def test_create_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.CreateInstanceRequest @@ -1271,6 +1325,22 @@ def test_update_instance_from_dict(): test_update_instance(request_type=dict) +def test_update_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + client.update_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.UpdateInstanceRequest() + + @pytest.mark.asyncio async def test_update_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateInstanceRequest @@ -1477,6 +1547,24 @@ def test_update_parameters_from_dict(): test_update_parameters(request_type=dict) +def test_update_parameters_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_parameters), "__call__" + ) as call: + client.update_parameters() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.UpdateParametersRequest() + + @pytest.mark.asyncio async def test_update_parameters_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateParametersRequest @@ -1695,6 +1783,22 @@ def test_delete_instance_from_dict(): test_delete_instance(request_type=dict) +def test_delete_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + client.delete_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.DeleteInstanceRequest() + + @pytest.mark.asyncio async def test_delete_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.DeleteInstanceRequest @@ -1881,6 +1985,22 @@ def test_apply_parameters_from_dict(): test_apply_parameters(request_type=dict) +def test_apply_parameters_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: + client.apply_parameters() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.ApplyParametersRequest() + + @pytest.mark.asyncio async def test_apply_parameters_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.ApplyParametersRequest @@ -2087,6 +2207,24 @@ def test_apply_software_update_from_dict(): test_apply_software_update(request_type=dict) +def test_apply_software_update_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_software_update), "__call__" + ) as call: + client.apply_software_update() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.ApplySoftwareUpdateRequest() + + @pytest.mark.asyncio async def test_apply_software_update_async( transport: str = "grpc_asyncio", From 83fc2cbfd24ff884a7e96fc03be543c909d26aac Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 2 Apr 2021 15:10:47 -0700 Subject: [PATCH 011/159] chore: start tracking obsolete files (#43) --- synth.metadata | 126 ++++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 120 insertions(+), 6 deletions(-) diff --git a/synth.metadata b/synth.metadata index df869eb..482c504 100644 --- a/synth.metadata +++ b/synth.metadata @@ -3,30 +3,30 @@ { "git": { "name": ".", - "remote": "git@github.com:googleapis/python-memcache", - "sha": "0f8357803b2ae2e06115d36ef4e84967a8ba1256" + "remote": "https://github.com/googleapis/python-memcache.git", + "sha": "f05345a9488cb73213ef40adf605f58878e95a0c" } }, { "git": { "name": "googleapis", "remote": 
"https://github.com/googleapis/googleapis.git", - "sha": "95dd24960cf9f794ef583e59ad9f1fabe1c4a924", - "internalRef": "365882072" + "sha": "56fc6d43fed71188d7e18f3ca003544646c4ab35", + "internalRef": "366346972" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "551dd78ca04f7989abc9e63e392f8b8cfa1a0ef9" + "sha": "ff39353f34a36e7643b86e97724e4027ab466dc6" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "551dd78ca04f7989abc9e63e392f8b8cfa1a0ef9" + "sha": "ff39353f34a36e7643b86e97724e4027ab466dc6" } } ], @@ -49,5 +49,119 @@ "generator": "bazel" } } + ], + "generatedFiles": [ + ".coveragerc", + ".flake8", + ".github/CONTRIBUTING.md", + ".github/ISSUE_TEMPLATE/bug_report.md", + ".github/ISSUE_TEMPLATE/feature_request.md", + ".github/ISSUE_TEMPLATE/support_request.md", + ".github/PULL_REQUEST_TEMPLATE.md", + ".github/header-checker-lint.yml", + ".github/release-please.yml", + ".github/snippet-bot.yml", + ".gitignore", + ".kokoro/build.sh", + ".kokoro/continuous/common.cfg", + ".kokoro/continuous/continuous.cfg", + ".kokoro/docker/docs/Dockerfile", + ".kokoro/docker/docs/fetch_gpg_keys.sh", + ".kokoro/docs/common.cfg", + ".kokoro/docs/docs-presubmit.cfg", + ".kokoro/docs/docs.cfg", + ".kokoro/populate-secrets.sh", + ".kokoro/presubmit/common.cfg", + ".kokoro/presubmit/presubmit.cfg", + ".kokoro/publish-docs.sh", + ".kokoro/release.sh", + ".kokoro/release/common.cfg", + ".kokoro/release/release.cfg", + ".kokoro/samples/lint/common.cfg", + ".kokoro/samples/lint/continuous.cfg", + ".kokoro/samples/lint/periodic.cfg", + ".kokoro/samples/lint/presubmit.cfg", + ".kokoro/samples/python3.6/common.cfg", + ".kokoro/samples/python3.6/continuous.cfg", + ".kokoro/samples/python3.6/periodic-head.cfg", + ".kokoro/samples/python3.6/periodic.cfg", + ".kokoro/samples/python3.6/presubmit.cfg", + ".kokoro/samples/python3.7/common.cfg", + ".kokoro/samples/python3.7/continuous.cfg", + 
".kokoro/samples/python3.7/periodic-head.cfg", + ".kokoro/samples/python3.7/periodic.cfg", + ".kokoro/samples/python3.7/presubmit.cfg", + ".kokoro/samples/python3.8/common.cfg", + ".kokoro/samples/python3.8/continuous.cfg", + ".kokoro/samples/python3.8/periodic-head.cfg", + ".kokoro/samples/python3.8/periodic.cfg", + ".kokoro/samples/python3.8/presubmit.cfg", + ".kokoro/test-samples-against-head.sh", + ".kokoro/test-samples-impl.sh", + ".kokoro/test-samples.sh", + ".kokoro/trampoline.sh", + ".kokoro/trampoline_v2.sh", + ".pre-commit-config.yaml", + ".trampolinerc", + "CODE_OF_CONDUCT.md", + "CONTRIBUTING.rst", + "LICENSE", + "MANIFEST.in", + "docs/_static/custom.css", + "docs/_templates/layout.html", + "docs/conf.py", + "docs/memcache_v1/cloud_memcache.rst", + "docs/memcache_v1/services.rst", + "docs/memcache_v1/types.rst", + "docs/memcache_v1beta2/cloud_memcache.rst", + "docs/memcache_v1beta2/services.rst", + "docs/memcache_v1beta2/types.rst", + "docs/multiprocessing.rst", + "google/cloud/memcache/__init__.py", + "google/cloud/memcache/py.typed", + "google/cloud/memcache_v1/__init__.py", + "google/cloud/memcache_v1/py.typed", + "google/cloud/memcache_v1/services/__init__.py", + "google/cloud/memcache_v1/services/cloud_memcache/__init__.py", + "google/cloud/memcache_v1/services/cloud_memcache/async_client.py", + "google/cloud/memcache_v1/services/cloud_memcache/client.py", + "google/cloud/memcache_v1/services/cloud_memcache/pagers.py", + "google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py", + "google/cloud/memcache_v1/services/cloud_memcache/transports/base.py", + "google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py", + "google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py", + "google/cloud/memcache_v1/types/__init__.py", + "google/cloud/memcache_v1/types/cloud_memcache.py", + "google/cloud/memcache_v1beta2/__init__.py", + "google/cloud/memcache_v1beta2/py.typed", + 
"google/cloud/memcache_v1beta2/services/__init__.py", + "google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py", + "google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py", + "google/cloud/memcache_v1beta2/services/cloud_memcache/client.py", + "google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py", + "google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py", + "google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py", + "google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py", + "google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py", + "google/cloud/memcache_v1beta2/types/__init__.py", + "google/cloud/memcache_v1beta2/types/cloud_memcache.py", + "mypy.ini", + "noxfile.py", + "renovate.json", + "scripts/decrypt-secrets.sh", + "scripts/fixup_memcache_v1_keywords.py", + "scripts/fixup_memcache_v1beta2_keywords.py", + "scripts/readme-gen/readme_gen.py", + "scripts/readme-gen/templates/README.tmpl.rst", + "scripts/readme-gen/templates/auth.tmpl.rst", + "scripts/readme-gen/templates/auth_api_key.tmpl.rst", + "scripts/readme-gen/templates/install_deps.tmpl.rst", + "scripts/readme-gen/templates/install_portaudio.tmpl.rst", + "setup.cfg", + "testing/.gitignore", + "tests/unit/gapic/memcache_v1/__init__.py", + "tests/unit/gapic/memcache_v1/test_cloud_memcache.py", + "tests/unit/gapic/memcache_v1beta2/__init__.py", + "tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py" ] } \ No newline at end of file From 2cb7c928081f008e9313c655c7562bb03d810133 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 7 Apr 2021 08:16:06 -0700 Subject: [PATCH 012/159] chore: Add license headers for python config files (#45) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/d9d975fd-7d94-4e18-acf5-f81484b3e675/targets - [ ] To automatically regenerate this PR, check this box. 
(May take up to 24 hours.) Source-Link: https://github.com/googleapis/synthtool/commit/5b5bf6d519b2d658d9f2e483d9f6f3d0ba8ee6bc --- .pre-commit-config.yaml | 14 ++++++++++++++ docs/conf.py | 13 +++++++++++++ synth.metadata | 6 +++--- 3 files changed, 30 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 32302e4..8912e9b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,3 +1,17 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# # See https://pre-commit.com for more information # See https://pre-commit.com/hooks.html for more hooks repos: diff --git a/docs/conf.py b/docs/conf.py index a84e615..2559e64 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,4 +1,17 @@ # -*- coding: utf-8 -*- +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
# # google-cloud-memcache documentation build configuration file # diff --git a/synth.metadata b/synth.metadata index 482c504..9699f0d 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-memcache.git", - "sha": "f05345a9488cb73213ef40adf605f58878e95a0c" + "sha": "83fc2cbfd24ff884a7e96fc03be543c909d26aac" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "ff39353f34a36e7643b86e97724e4027ab466dc6" + "sha": "5b5bf6d519b2d658d9f2e483d9f6f3d0ba8ee6bc" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "ff39353f34a36e7643b86e97724e4027ab466dc6" + "sha": "5b5bf6d519b2d658d9f2e483d9f6f3d0ba8ee6bc" } } ], From 68ed93b390afe485ab726ae1afc2678b4f4695f7 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 7 Apr 2021 13:12:01 -0700 Subject: [PATCH 013/159] chore: add tarfile (#44) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/ece0a7a2-2f86-41db-8b2c-45661f964b28/targets - [ ] To automatically regenerate this PR, check this box. (May take up to 24 hours.) 
--- memcache-v1beta2-py.tar.gz | 0 synth.metadata | 1 + 2 files changed, 1 insertion(+) create mode 100644 memcache-v1beta2-py.tar.gz diff --git a/memcache-v1beta2-py.tar.gz b/memcache-v1beta2-py.tar.gz new file mode 100644 index 0000000..e69de29 diff --git a/synth.metadata b/synth.metadata index 9699f0d..e2b5a21 100644 --- a/synth.metadata +++ b/synth.metadata @@ -145,6 +145,7 @@ "google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py", "google/cloud/memcache_v1beta2/types/__init__.py", "google/cloud/memcache_v1beta2/types/cloud_memcache.py", + "memcache-v1beta2-py.tar.gz", "mypy.ini", "noxfile.py", "renovate.json", From 64c0313eda37046c9791b0f83f7df70310427dc2 Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Fri, 16 Apr 2021 17:31:45 -0400 Subject: [PATCH 014/159] chore: prevent normalization of semver versioning (#48) * chore: prevent normalization of semver versioning * chore: update workaround to make sic work --- setup.py | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 94938bf..1abcf5d 100644 --- a/setup.py +++ b/setup.py @@ -19,6 +19,21 @@ import os import setuptools # type: ignore +# Disable version normalization performed by setuptools.setup() +try: + # Try the approach of using sic(), added in setuptools 46.1.0 + from setuptools import sic +except ImportError: + # Try the approach of replacing packaging.version.Version + sic = lambda v: v + try: + # setuptools >=39.0.0 uses packaging from setuptools.extern + from setuptools.extern import packaging + except ImportError: + # setuptools <39.0.0 uses packaging from pkg_resources.extern + from pkg_resources.extern import packaging + packaging.version.Version = packaging.version.LegacyVersion + version = "0.3.0" package_root = os.path.abspath(os.path.dirname(__file__)) @@ -29,7 +44,7 @@ setuptools.setup( name="google-cloud-memcache", - version=version, + version=sic(version), 
long_description=readme, author="Google LLC", author_email="googleapis-packages@google.com", From 9fa0446f46e98c701e92fce6388c4093e880f40c Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Mon, 26 Apr 2021 20:49:59 -0400 Subject: [PATCH 015/159] chore(revert): revert preventing normalization (#49) --- setup.py | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/setup.py b/setup.py index 1abcf5d..94938bf 100644 --- a/setup.py +++ b/setup.py @@ -19,21 +19,6 @@ import os import setuptools # type: ignore -# Disable version normalization performed by setuptools.setup() -try: - # Try the approach of using sic(), added in setuptools 46.1.0 - from setuptools import sic -except ImportError: - # Try the approach of replacing packaging.version.Version - sic = lambda v: v - try: - # setuptools >=39.0.0 uses packaging from setuptools.extern - from setuptools.extern import packaging - except ImportError: - # setuptools <39.0.0 uses packaging from pkg_resources.extern - from pkg_resources.extern import packaging - packaging.version.Version = packaging.version.LegacyVersion - version = "0.3.0" package_root = os.path.abspath(os.path.dirname(__file__)) @@ -44,7 +29,7 @@ setuptools.setup( name="google-cloud-memcache", - version=sic(version), + version=version, long_description=readme, author="Google LLC", author_email="googleapis-packages@google.com", From c53912a3abdde69c4440411cb3af07641acf4418 Mon Sep 17 00:00:00 2001 From: "google-cloud-policy-bot[bot]" <80869356+google-cloud-policy-bot[bot]@users.noreply.github.com> Date: Wed, 12 May 2021 13:18:08 +0000 Subject: [PATCH 016/159] chore: add SECURITY.md (#51) chore: add SECURITY.md --- SECURITY.md | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 SECURITY.md diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000..8b58ae9 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use 
[g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue. From 6523159e682634dcf7ef5c1a81073b5ab05bbfe5 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 12 May 2021 18:30:02 -0400 Subject: [PATCH 017/159] chore: add library type to .repo-metadata.json (#54) --- .repo-metadata.json | 1 + 1 file changed, 1 insertion(+) diff --git a/.repo-metadata.json b/.repo-metadata.json index c0fd1d9..7bff6b7 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -6,6 +6,7 @@ "issue_tracker": "", "release_level": "beta", "language": "python", + "library_type": "GAPIC_AUTO", "repo": "googleapis/python-memcache", "distribution_name": "google-cloud-memcache", "api_id": "memcache.googleapis.com" From 563537d47016a7a24ee826379e1804c8de0da184 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 12 May 2021 18:46:01 -0400 Subject: [PATCH 018/159] chore: migrate to owl bot (#55) This PR migrates from autosynth to [owl bot](https://github.com/googleapis/repo-automation-bots/tree/master/packages/owl-bot). owl bot will save time for maintainers as it will automatically open PRs when there are updates in [googleapis-gen](https://github.com/googleapis/googleapis-gen/tree/master/google) without requiring maintainers to run `synthtool` to build the client from protos. Additionally, similar to autosynth, PRs will be automatically opened when there are template updates. 
--- .github/.OwlBot.lock.yaml | 4 + .github/.OwlBot.yaml | 26 +++++ .github/header-checker-lint.yml | 2 +- .kokoro/release.sh | 4 +- .kokoro/release/common.cfg | 14 +-- .pre-commit-config.yaml | 2 +- CONTRIBUTING.rst | 16 +-- docs/_static/custom.css | 13 ++- noxfile.py | 14 +-- synth.py => owlbot.py | 17 +--- renovate.json | 5 +- synth.metadata | 168 -------------------------------- 12 files changed, 59 insertions(+), 226 deletions(-) create mode 100644 .github/.OwlBot.lock.yaml create mode 100644 .github/.OwlBot.yaml rename synth.py => owlbot.py (72%) delete mode 100644 synth.metadata diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml new file mode 100644 index 0000000..d49860b --- /dev/null +++ b/.github/.OwlBot.lock.yaml @@ -0,0 +1,4 @@ +docker: + digest: sha256:457583330eec64daa02aeb7a72a04d33e7be2428f646671ce4045dcbc0191b1e + image: gcr.io/repo-automation-bots/owlbot-python:latest + diff --git a/.github/.OwlBot.yaml b/.github/.OwlBot.yaml new file mode 100644 index 0000000..1f364a6 --- /dev/null +++ b/.github/.OwlBot.yaml @@ -0,0 +1,26 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +docker: + image: gcr.io/repo-automation-bots/owlbot-python:latest + +deep-remove-regex: + - /owl-bot-staging + +deep-copy-regex: + - source: /google/cloud/memcache/(v.*)/.*-py/(.*) + dest: /owl-bot-staging/$1/$2 + +begin-after-commit-hash: 6a5da3f1274b088752f074da5bc9e30bd1beb27e + diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml index fc281c0..6fe78aa 100644 --- a/.github/header-checker-lint.yml +++ b/.github/header-checker-lint.yml @@ -1,6 +1,6 @@ {"allowedCopyrightHolders": ["Google LLC"], "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], - "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt"], + "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"], "sourceFileExtensions": [ "ts", "js", diff --git a/.kokoro/release.sh b/.kokoro/release.sh index ee38821..01e25c7 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -26,7 +26,7 @@ python3 -m pip install --upgrade twine wheel setuptools export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. 
-TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google_cloud_pypi_password") +TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token") cd github/python-memcache python3 setup.py sdist bdist_wheel -twine upload --username gcloudpypi --password "${TWINE_PASSWORD}" dist/* +twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index b4b40c4..921b289 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -23,18 +23,8 @@ env_vars: { value: "github/python-memcache/.kokoro/release.sh" } -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google_cloud_pypi_password" - } - } -} - # Tokens needed to report release status back to GitHub env_vars: { key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" -} \ No newline at end of file + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token" +} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8912e9b..1bbd787 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -26,6 +26,6 @@ repos: hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 - rev: 3.9.0 + rev: 3.9.1 hooks: - id: flake8 diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index aacba05..f45c3c2 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -160,21 +160,7 @@ Running System Tests auth settings and change some configuration in your project to run all the tests. 
-- System tests will be run against an actual project and - so you'll need to provide some environment variables to facilitate - authentication to your project: - - - ``GOOGLE_APPLICATION_CREDENTIALS``: The path to a JSON key file; - Such a file can be downloaded directly from the developer's console by clicking - "Generate new JSON key". See private key - `docs `__ - for more details. - -- Once you have downloaded your json keys, set the environment variable - ``GOOGLE_APPLICATION_CREDENTIALS`` to the absolute path of the json file:: - - $ export GOOGLE_APPLICATION_CREDENTIALS="/Users//path/to/app_credentials.json" - +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. ************* Test Coverage diff --git a/docs/_static/custom.css b/docs/_static/custom.css index bcd37bb..b0a2954 100644 --- a/docs/_static/custom.css +++ b/docs/_static/custom.css @@ -1,9 +1,20 @@ div#python2-eol { border-color: red; border-width: medium; -} +} /* Ensure minimum width for 'Parameters' / 'Returns' column */ dl.field-list > dt { min-width: 100px } + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/noxfile.py b/noxfile.py index 0eb433e..04c230d 100644 --- a/noxfile.py +++ b/noxfile.py @@ -62,16 +62,9 @@ def lint(session): session.run("flake8", "google", "tests") -@nox.session(python="3.6") +@nox.session(python=DEFAULT_PYTHON_VERSION) def blacken(session): - """Run black. - - Format code to uniform standard. - - This currently uses Python 3.6 due to the automated Kokoro run of synthtool. - That run uses an image that doesn't have 3.6 installed. 
Before updating this - check the state of the `gcp_ubuntu_config` we use for that Kokoro run. - """ + """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) session.run( "black", *BLACK_PATHS, @@ -131,9 +124,6 @@ def system(session): # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": session.skip("RUN_SYSTEM_TESTS is set to false, skipping") - # Sanity check: Only run tests if the environment variable is set. - if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): - session.skip("Credentials must be set via environment variable") # Install pyopenssl for mTLS testing. if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": session.install("pyopenssl") diff --git a/synth.py b/owlbot.py similarity index 72% rename from synth.py rename to owlbot.py index b097003..fb3ae3a 100644 --- a/synth.py +++ b/owlbot.py @@ -19,22 +19,11 @@ import synthtool.gcp as gcp from synthtool.languages import python -gapic = gcp.GAPICBazel() common = gcp.CommonTemplates() -versions = ["v1beta2", - "v1", - ] # add new versions at the end of the list - -# ---------------------------------------------------------------------------- -# Generate memcache GAPIC layer -# ---------------------------------------------------------------------------- -for version in versions: - library = gapic.py_library( - service="memcache", - version=version, - bazel_target=f"//google/cloud/memcache/{version}:memcache-{version}-py") +default_version = "v1" +for library in s.get_staging_dirs(default_version): excludes = [ "setup.py", "docs/index.rst", @@ -42,6 +31,8 @@ ] s.move(library, excludes=excludes) +s.remove_staging_dirs() + # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- diff --git a/renovate.json b/renovate.json index f08bc22..c048955 100644 --- 
a/renovate.json +++ b/renovate.json @@ -2,5 +2,8 @@ "extends": [ "config:base", ":preserveSemverRanges" ], - "ignorePaths": [".pre-commit-config.yaml"] + "ignorePaths": [".pre-commit-config.yaml"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } } diff --git a/synth.metadata b/synth.metadata deleted file mode 100644 index e2b5a21..0000000 --- a/synth.metadata +++ /dev/null @@ -1,168 +0,0 @@ -{ - "sources": [ - { - "git": { - "name": ".", - "remote": "https://github.com/googleapis/python-memcache.git", - "sha": "83fc2cbfd24ff884a7e96fc03be543c909d26aac" - } - }, - { - "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "56fc6d43fed71188d7e18f3ca003544646c4ab35", - "internalRef": "366346972" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "5b5bf6d519b2d658d9f2e483d9f6f3d0ba8ee6bc" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "5b5bf6d519b2d658d9f2e483d9f6f3d0ba8ee6bc" - } - } - ], - "destinations": [ - { - "client": { - "source": "googleapis", - "apiName": "memcache", - "apiVersion": "v1beta2", - "language": "python", - "generator": "bazel" - } - }, - { - "client": { - "source": "googleapis", - "apiName": "memcache", - "apiVersion": "v1", - "language": "python", - "generator": "bazel" - } - } - ], - "generatedFiles": [ - ".coveragerc", - ".flake8", - ".github/CONTRIBUTING.md", - ".github/ISSUE_TEMPLATE/bug_report.md", - ".github/ISSUE_TEMPLATE/feature_request.md", - ".github/ISSUE_TEMPLATE/support_request.md", - ".github/PULL_REQUEST_TEMPLATE.md", - ".github/header-checker-lint.yml", - ".github/release-please.yml", - ".github/snippet-bot.yml", - ".gitignore", - ".kokoro/build.sh", - ".kokoro/continuous/common.cfg", - ".kokoro/continuous/continuous.cfg", - ".kokoro/docker/docs/Dockerfile", - 
".kokoro/docker/docs/fetch_gpg_keys.sh", - ".kokoro/docs/common.cfg", - ".kokoro/docs/docs-presubmit.cfg", - ".kokoro/docs/docs.cfg", - ".kokoro/populate-secrets.sh", - ".kokoro/presubmit/common.cfg", - ".kokoro/presubmit/presubmit.cfg", - ".kokoro/publish-docs.sh", - ".kokoro/release.sh", - ".kokoro/release/common.cfg", - ".kokoro/release/release.cfg", - ".kokoro/samples/lint/common.cfg", - ".kokoro/samples/lint/continuous.cfg", - ".kokoro/samples/lint/periodic.cfg", - ".kokoro/samples/lint/presubmit.cfg", - ".kokoro/samples/python3.6/common.cfg", - ".kokoro/samples/python3.6/continuous.cfg", - ".kokoro/samples/python3.6/periodic-head.cfg", - ".kokoro/samples/python3.6/periodic.cfg", - ".kokoro/samples/python3.6/presubmit.cfg", - ".kokoro/samples/python3.7/common.cfg", - ".kokoro/samples/python3.7/continuous.cfg", - ".kokoro/samples/python3.7/periodic-head.cfg", - ".kokoro/samples/python3.7/periodic.cfg", - ".kokoro/samples/python3.7/presubmit.cfg", - ".kokoro/samples/python3.8/common.cfg", - ".kokoro/samples/python3.8/continuous.cfg", - ".kokoro/samples/python3.8/periodic-head.cfg", - ".kokoro/samples/python3.8/periodic.cfg", - ".kokoro/samples/python3.8/presubmit.cfg", - ".kokoro/test-samples-against-head.sh", - ".kokoro/test-samples-impl.sh", - ".kokoro/test-samples.sh", - ".kokoro/trampoline.sh", - ".kokoro/trampoline_v2.sh", - ".pre-commit-config.yaml", - ".trampolinerc", - "CODE_OF_CONDUCT.md", - "CONTRIBUTING.rst", - "LICENSE", - "MANIFEST.in", - "docs/_static/custom.css", - "docs/_templates/layout.html", - "docs/conf.py", - "docs/memcache_v1/cloud_memcache.rst", - "docs/memcache_v1/services.rst", - "docs/memcache_v1/types.rst", - "docs/memcache_v1beta2/cloud_memcache.rst", - "docs/memcache_v1beta2/services.rst", - "docs/memcache_v1beta2/types.rst", - "docs/multiprocessing.rst", - "google/cloud/memcache/__init__.py", - "google/cloud/memcache/py.typed", - "google/cloud/memcache_v1/__init__.py", - "google/cloud/memcache_v1/py.typed", - 
"google/cloud/memcache_v1/services/__init__.py", - "google/cloud/memcache_v1/services/cloud_memcache/__init__.py", - "google/cloud/memcache_v1/services/cloud_memcache/async_client.py", - "google/cloud/memcache_v1/services/cloud_memcache/client.py", - "google/cloud/memcache_v1/services/cloud_memcache/pagers.py", - "google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py", - "google/cloud/memcache_v1/services/cloud_memcache/transports/base.py", - "google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py", - "google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py", - "google/cloud/memcache_v1/types/__init__.py", - "google/cloud/memcache_v1/types/cloud_memcache.py", - "google/cloud/memcache_v1beta2/__init__.py", - "google/cloud/memcache_v1beta2/py.typed", - "google/cloud/memcache_v1beta2/services/__init__.py", - "google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py", - "google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py", - "google/cloud/memcache_v1beta2/services/cloud_memcache/client.py", - "google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py", - "google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py", - "google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py", - "google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py", - "google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py", - "google/cloud/memcache_v1beta2/types/__init__.py", - "google/cloud/memcache_v1beta2/types/cloud_memcache.py", - "memcache-v1beta2-py.tar.gz", - "mypy.ini", - "noxfile.py", - "renovate.json", - "scripts/decrypt-secrets.sh", - "scripts/fixup_memcache_v1_keywords.py", - "scripts/fixup_memcache_v1beta2_keywords.py", - "scripts/readme-gen/readme_gen.py", - "scripts/readme-gen/templates/README.tmpl.rst", - "scripts/readme-gen/templates/auth.tmpl.rst", - "scripts/readme-gen/templates/auth_api_key.tmpl.rst", - 
"scripts/readme-gen/templates/install_deps.tmpl.rst", - "scripts/readme-gen/templates/install_portaudio.tmpl.rst", - "setup.cfg", - "testing/.gitignore", - "tests/unit/gapic/memcache_v1/__init__.py", - "tests/unit/gapic/memcache_v1/test_cloud_memcache.py", - "tests/unit/gapic/memcache_v1beta2/__init__.py", - "tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py" - ] -} \ No newline at end of file From 266e4069d09227f6379b553bf426f7787a890743 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 16 May 2021 13:28:01 +0000 Subject: [PATCH 019/159] chore: new owl bot post processor docker image (#58) gcr.io/repo-automation-bots/owlbot-python:latest@sha256:4c981a6b6f2b8914a448d7b3a01688365be03e3ed26dfee399a6aa77fb112eaa --- .github/.OwlBot.lock.yaml | 5 ++--- .pre-commit-config.yaml | 2 +- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index d49860b..864c176 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,4 +1,3 @@ docker: - digest: sha256:457583330eec64daa02aeb7a72a04d33e7be2428f646671ce4045dcbc0191b1e - image: gcr.io/repo-automation-bots/owlbot-python:latest - + image: gcr.io/repo-automation-bots/owlbot-python:latest + digest: sha256:4c981a6b6f2b8914a448d7b3a01688365be03e3ed26dfee399a6aa77fb112eaa diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1bbd787..4f00c7c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -26,6 +26,6 @@ repos: hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 - rev: 3.9.1 + rev: 3.9.2 hooks: - id: flake8 From b8d9394dd34b97ddd68f8c73a5f516ba5294a70c Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 18 May 2021 12:48:09 -0400 Subject: [PATCH 020/159] feat: bump release level to production/stable (#59) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes #38 🩕 --- .repo-metadata.json 
| 2 +- README.rst | 6 +++--- setup.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.repo-metadata.json b/.repo-metadata.json index 7bff6b7..718be57 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -4,7 +4,7 @@ "product_documentation": "cloud.google.com/memorystore/docs/memcached/", "client_documentation": "https://googleapis.dev/python/memcache/latest", "issue_tracker": "", - "release_level": "beta", + "release_level": "ga", "language": "python", "library_type": "GAPIC_AUTO", "repo": "googleapis/python-memcache", diff --git a/README.rst b/README.rst index a2a93a7..74d0643 100644 --- a/README.rst +++ b/README.rst @@ -1,7 +1,7 @@ Python Client for Cloud Memorystore for Memcached ================================================= -|beta| |pypi| |versions| +|GA| |pypi| |versions| `Cloud Memorystore for Memached API`_: Ship performant and highly available key value store, compatible with OSS Memcached protocol. @@ -9,8 +9,8 @@ compatible with OSS Memcached protocol. - `Client Library Documentation`_ - `Product Documentation`_ -.. |beta| image:: https://img.shields.io/badge/support-beta-orange.svg - :target: https://github.com/googleapis/google-cloud-python/blob/master/README.rst#beta-support +.. |GA| image:: https://img.shields.io/badge/support-ga-gold.svg + :target: https://github.com/googleapis/google-cloud-python/blob/master/README.rst#general-availability .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-memcache.svg :target: https://pypi.org/project/google-cloud-memcache/ .. 
|versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-memcache.svg diff --git a/setup.py b/setup.py index 94938bf..e9985ee 100644 --- a/setup.py +++ b/setup.py @@ -45,7 +45,7 @@ ), python_requires=">=3.6", classifiers=[ - "Development Status :: 4 - Beta", + "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python :: 3.6", From 2ad1bfbee1f847c1b150b0e1595faba63f42d768 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 20 May 2021 12:52:57 -0600 Subject: [PATCH 021/159] chore: upgrade gapic-generator-python to 0.46.3 (#57) PiperOrigin-RevId: 373649163 Source-Link: https://github.com/googleapis/googleapis/commit/7e1b14e6c7a9ab96d2db7e4a131981f162446d34 Source-Link: https://github.com/googleapis/googleapis-gen/commit/0a3c7d272d697796db75857bac73905c68e498c3 feat: support self-signed JWT flow for service accounts fix: add async client to %name_%version/init.py chore: add autogenerated snippets chore: remove auth, policy, and options from the reserved names list chore: enable GAPIC metadata generation chore: sort subpackages in %namespace/%name/init.py --- docs/memcache_v1/cloud_memcache.rst | 1 - docs/memcache_v1beta2/cloud_memcache.rst | 1 - google/cloud/memcache/__init__.py | 12 +- google/cloud/memcache_v1/__init__.py | 9 +- google/cloud/memcache_v1/gapic_metadata.json | 93 +++ google/cloud/memcache_v1/services/__init__.py | 1 - .../services/cloud_memcache/__init__.py | 2 - .../services/cloud_memcache/async_client.py | 51 +- .../services/cloud_memcache/client.py | 94 +-- .../services/cloud_memcache/pagers.py | 4 +- .../cloud_memcache/transports/__init__.py | 2 - .../cloud_memcache/transports/base.py | 132 +++- .../cloud_memcache/transports/grpc.py | 44 +- .../cloud_memcache/transports/grpc_asyncio.py | 45 +- google/cloud/memcache_v1/types/__init__.py | 2 - 
.../cloud/memcache_v1/types/cloud_memcache.py | 151 ++-- google/cloud/memcache_v1beta2/__init__.py | 9 +- .../memcache_v1beta2/gapic_metadata.json | 103 +++ .../memcache_v1beta2/services/__init__.py | 1 - .../services/cloud_memcache/__init__.py | 2 - .../services/cloud_memcache/async_client.py | 54 +- .../services/cloud_memcache/client.py | 97 +-- .../services/cloud_memcache/pagers.py | 4 +- .../cloud_memcache/transports/__init__.py | 2 - .../cloud_memcache/transports/base.py | 136 ++-- .../cloud_memcache/transports/grpc.py | 50 +- .../cloud_memcache/transports/grpc_asyncio.py | 49 +- .../cloud/memcache_v1beta2/types/__init__.py | 2 - .../memcache_v1beta2/types/cloud_memcache.py | 166 ++--- scripts/fixup_memcache_v1_keywords.py | 19 +- scripts/fixup_memcache_v1beta2_keywords.py | 21 +- setup.py | 1 + testing/constraints-3.6.txt | 2 + tests/__init__.py | 15 + tests/unit/__init__.py | 15 + tests/unit/gapic/__init__.py | 15 + tests/unit/gapic/memcache_v1/__init__.py | 1 - .../gapic/memcache_v1/test_cloud_memcache.py | 615 +++++++++------- tests/unit/gapic/memcache_v1beta2/__init__.py | 1 - .../memcache_v1beta2/test_cloud_memcache.py | 657 +++++++++++------- 40 files changed, 1558 insertions(+), 1123 deletions(-) create mode 100644 google/cloud/memcache_v1/gapic_metadata.json create mode 100644 google/cloud/memcache_v1beta2/gapic_metadata.json create mode 100644 tests/__init__.py create mode 100644 tests/unit/__init__.py create mode 100644 tests/unit/gapic/__init__.py diff --git a/docs/memcache_v1/cloud_memcache.rst b/docs/memcache_v1/cloud_memcache.rst index 0c21866..35de375 100644 --- a/docs/memcache_v1/cloud_memcache.rst +++ b/docs/memcache_v1/cloud_memcache.rst @@ -5,7 +5,6 @@ CloudMemcache :members: :inherited-members: - .. 
automodule:: google.cloud.memcache_v1.services.cloud_memcache.pagers :members: :inherited-members: diff --git a/docs/memcache_v1beta2/cloud_memcache.rst b/docs/memcache_v1beta2/cloud_memcache.rst index b20fc3a..7dc7a82 100644 --- a/docs/memcache_v1beta2/cloud_memcache.rst +++ b/docs/memcache_v1beta2/cloud_memcache.rst @@ -5,7 +5,6 @@ CloudMemcache :members: :inherited-members: - .. automodule:: google.cloud.memcache_v1beta2.services.cloud_memcache.pagers :members: :inherited-members: diff --git a/google/cloud/memcache/__init__.py b/google/cloud/memcache/__init__.py index 4075bad..f2b7c14 100644 --- a/google/cloud/memcache/__init__.py +++ b/google/cloud/memcache/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,10 +14,11 @@ # limitations under the License. # +from google.cloud.memcache_v1.services.cloud_memcache.client import CloudMemcacheClient from google.cloud.memcache_v1.services.cloud_memcache.async_client import ( CloudMemcacheAsyncClient, ) -from google.cloud.memcache_v1.services.cloud_memcache.client import CloudMemcacheClient + from google.cloud.memcache_v1.types.cloud_memcache import ApplyParametersRequest from google.cloud.memcache_v1.types.cloud_memcache import CreateInstanceRequest from google.cloud.memcache_v1.types.cloud_memcache import DeleteInstanceRequest @@ -27,15 +27,15 @@ from google.cloud.memcache_v1.types.cloud_memcache import ListInstancesRequest from google.cloud.memcache_v1.types.cloud_memcache import ListInstancesResponse from google.cloud.memcache_v1.types.cloud_memcache import MemcacheParameters -from google.cloud.memcache_v1.types.cloud_memcache import MemcacheVersion from google.cloud.memcache_v1.types.cloud_memcache import OperationMetadata from google.cloud.memcache_v1.types.cloud_memcache import UpdateInstanceRequest from google.cloud.memcache_v1.types.cloud_memcache import UpdateParametersRequest +from 
google.cloud.memcache_v1.types.cloud_memcache import MemcacheVersion __all__ = ( - "ApplyParametersRequest", - "CloudMemcacheAsyncClient", "CloudMemcacheClient", + "CloudMemcacheAsyncClient", + "ApplyParametersRequest", "CreateInstanceRequest", "DeleteInstanceRequest", "GetInstanceRequest", @@ -43,8 +43,8 @@ "ListInstancesRequest", "ListInstancesResponse", "MemcacheParameters", - "MemcacheVersion", "OperationMetadata", "UpdateInstanceRequest", "UpdateParametersRequest", + "MemcacheVersion", ) diff --git a/google/cloud/memcache_v1/__init__.py b/google/cloud/memcache_v1/__init__.py index 4d28d1b..7d3016d 100644 --- a/google/cloud/memcache_v1/__init__.py +++ b/google/cloud/memcache_v1/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -16,6 +15,8 @@ # from .services.cloud_memcache import CloudMemcacheClient +from .services.cloud_memcache import CloudMemcacheAsyncClient + from .types.cloud_memcache import ApplyParametersRequest from .types.cloud_memcache import CreateInstanceRequest from .types.cloud_memcache import DeleteInstanceRequest @@ -24,14 +25,15 @@ from .types.cloud_memcache import ListInstancesRequest from .types.cloud_memcache import ListInstancesResponse from .types.cloud_memcache import MemcacheParameters -from .types.cloud_memcache import MemcacheVersion from .types.cloud_memcache import OperationMetadata from .types.cloud_memcache import UpdateInstanceRequest from .types.cloud_memcache import UpdateParametersRequest - +from .types.cloud_memcache import MemcacheVersion __all__ = ( + "CloudMemcacheAsyncClient", "ApplyParametersRequest", + "CloudMemcacheClient", "CreateInstanceRequest", "DeleteInstanceRequest", "GetInstanceRequest", @@ -43,5 +45,4 @@ "OperationMetadata", "UpdateInstanceRequest", "UpdateParametersRequest", - "CloudMemcacheClient", ) diff --git a/google/cloud/memcache_v1/gapic_metadata.json b/google/cloud/memcache_v1/gapic_metadata.json new 
file mode 100644 index 0000000..08d37fa --- /dev/null +++ b/google/cloud/memcache_v1/gapic_metadata.json @@ -0,0 +1,93 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.memcache_v1", + "protoPackage": "google.cloud.memcache.v1", + "schema": "1.0", + "services": { + "CloudMemcache": { + "clients": { + "grpc": { + "libraryClient": "CloudMemcacheClient", + "rpcs": { + "ApplyParameters": { + "methods": [ + "apply_parameters" + ] + }, + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + }, + "UpdateParameters": { + "methods": [ + "update_parameters" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CloudMemcacheAsyncClient", + "rpcs": { + "ApplyParameters": { + "methods": [ + "apply_parameters" + ] + }, + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + }, + "UpdateParameters": { + "methods": [ + "update_parameters" + ] + } + } + } + } + } + } +} diff --git a/google/cloud/memcache_v1/services/__init__.py b/google/cloud/memcache_v1/services/__init__.py index 42ffdf2..4de6597 100644 --- a/google/cloud/memcache_v1/services/__init__.py +++ b/google/cloud/memcache_v1/services/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/google/cloud/memcache_v1/services/cloud_memcache/__init__.py 
b/google/cloud/memcache_v1/services/cloud_memcache/__init__.py index 8524cb4..efb245e 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/__init__.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .client import CloudMemcacheClient from .async_client import CloudMemcacheAsyncClient diff --git a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py index b09fddb..738a2db 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict import functools import re @@ -22,20 +20,19 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.memcache_v1.services.cloud_memcache import pagers from google.cloud.memcache_v1.types import cloud_memcache -from google.protobuf import empty_pb2 as empty # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport from .client import CloudMemcacheClient @@ -70,31 +67,26 @@ class CloudMemcacheAsyncClient: instance_path = staticmethod(CloudMemcacheClient.instance_path) parse_instance_path = staticmethod(CloudMemcacheClient.parse_instance_path) - common_billing_account_path = staticmethod( CloudMemcacheClient.common_billing_account_path ) parse_common_billing_account_path = staticmethod( CloudMemcacheClient.parse_common_billing_account_path ) - common_folder_path = staticmethod(CloudMemcacheClient.common_folder_path) parse_common_folder_path = staticmethod( CloudMemcacheClient.parse_common_folder_path ) - common_organization_path = 
staticmethod( CloudMemcacheClient.common_organization_path ) parse_common_organization_path = staticmethod( CloudMemcacheClient.parse_common_organization_path ) - common_project_path = staticmethod(CloudMemcacheClient.common_project_path) parse_common_project_path = staticmethod( CloudMemcacheClient.parse_common_project_path ) - common_location_path = staticmethod(CloudMemcacheClient.common_location_path) parse_common_location_path = staticmethod( CloudMemcacheClient.parse_common_location_path @@ -102,7 +94,8 @@ class CloudMemcacheAsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -117,7 +110,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -134,7 +127,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> CloudMemcacheTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: CloudMemcacheTransport: The transport used by the client instance. @@ -148,12 +141,12 @@ def transport(self) -> CloudMemcacheTransport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, CloudMemcacheTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the cloud memcache client. + """Instantiates the cloud memcache client. 
Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -185,7 +178,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = CloudMemcacheClient( credentials=credentials, transport=transport, @@ -217,7 +209,6 @@ async def list_instances( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -247,7 +238,6 @@ async def list_instances( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -301,7 +291,6 @@ async def get_instance( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -326,7 +315,6 @@ async def get_instance( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -398,7 +386,6 @@ async def create_instance( This corresponds to the ``instance_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -427,7 +414,6 @@ async def create_instance( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: request.parent = parent if instance is not None: @@ -468,7 +454,7 @@ async def update_instance( request: cloud_memcache.UpdateInstanceRequest = None, *, instance: cloud_memcache.Instance = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -495,7 +481,6 @@ async def update_instance( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -524,7 +509,6 @@ async def update_instance( # If we have keyword arguments corresponding to fields on the # request, apply these. - if instance is not None: request.instance = instance if update_mask is not None: @@ -565,7 +549,7 @@ async def update_parameters( request: cloud_memcache.UpdateParametersRequest = None, *, name: str = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, parameters: cloud_memcache.MemcacheParameters = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -600,7 +584,6 @@ async def update_parameters( This corresponds to the ``parameters`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -629,7 +612,6 @@ async def update_parameters( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if update_mask is not None: @@ -689,7 +671,6 @@ async def delete_instance( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -729,7 +710,6 @@ async def delete_instance( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -754,7 +734,7 @@ async def delete_instance( response = operation_async.from_gapic( response, self._client._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=cloud_memcache.OperationMetadata, ) @@ -806,7 +786,6 @@ async def apply_parameters( This corresponds to the ``apply_all`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -835,12 +814,10 @@ async def apply_parameters( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if apply_all is not None: request.apply_all = apply_all - if node_ids: request.node_ids.extend(node_ids) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py index 591ee49..3152eee 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict from distutils import util import os @@ -23,10 +21,10 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore @@ -36,10 +34,9 @@ from google.api_core import operation_async # type: ignore from google.cloud.memcache_v1.services.cloud_memcache import pagers from google.cloud.memcache_v1.types import cloud_memcache -from google.protobuf import empty_pb2 as empty # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO from .transports.grpc import CloudMemcacheGrpcTransport from .transports.grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport @@ -58,7 +55,7 @@ class CloudMemcacheClientMeta(type): _transport_registry["grpc_asyncio"] = CloudMemcacheGrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[CloudMemcacheTransport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. 
If none is @@ -100,7 +97,8 @@ class CloudMemcacheClient(metaclass=CloudMemcacheClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -134,7 +132,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -151,7 +150,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -170,23 +169,24 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> CloudMemcacheTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - CloudMemcacheTransport: The transport used by the client instance. + CloudMemcacheTransport: The transport used by the client + instance. 
""" return self._transport @staticmethod def instance_path(project: str, location: str, instance: str,) -> str: - """Return a fully-qualified instance string.""" + """Returns a fully-qualified instance string.""" return "projects/{project}/locations/{location}/instances/{instance}".format( project=project, location=location, instance=instance, ) @staticmethod def parse_instance_path(path: str) -> Dict[str, str]: - """Parse a instance path into its component segments.""" + """Parses a instance path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$", path, @@ -195,7 +195,7 @@ def parse_instance_path(path: str) -> Dict[str, str]: @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -208,7 +208,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -219,7 +219,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -230,7 +230,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -241,7 +241,7 @@ def parse_common_project_path(path: str) -> 
Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -255,12 +255,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, CloudMemcacheTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the cloud memcache client. + """Instantiates the cloud memcache client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -315,9 +315,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -329,12 +330,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. 
@@ -349,8 +352,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -389,7 +392,6 @@ def list_instances( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -421,10 +423,8 @@ def list_instances( # there are no flattened fields. if not isinstance(request, cloud_memcache.ListInstancesRequest): request = cloud_memcache.ListInstancesRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -474,7 +474,6 @@ def get_instance( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -501,10 +500,8 @@ def get_instance( # there are no flattened fields. if not isinstance(request, cloud_memcache.GetInstanceRequest): request = cloud_memcache.GetInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -572,7 +569,6 @@ def create_instance( This corresponds to the ``instance_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -603,10 +599,8 @@ def create_instance( # there are no flattened fields. 
if not isinstance(request, cloud_memcache.CreateInstanceRequest): request = cloud_memcache.CreateInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if instance is not None: @@ -643,7 +637,7 @@ def update_instance( request: cloud_memcache.UpdateInstanceRequest = None, *, instance: cloud_memcache.Instance = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -670,7 +664,6 @@ def update_instance( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -701,10 +694,8 @@ def update_instance( # there are no flattened fields. if not isinstance(request, cloud_memcache.UpdateInstanceRequest): request = cloud_memcache.UpdateInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if instance is not None: request.instance = instance if update_mask is not None: @@ -741,7 +732,7 @@ def update_parameters( request: cloud_memcache.UpdateParametersRequest = None, *, name: str = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, parameters: cloud_memcache.MemcacheParameters = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -776,7 +767,6 @@ def update_parameters( This corresponds to the ``parameters`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -807,10 +797,8 @@ def update_parameters( # there are no flattened fields. if not isinstance(request, cloud_memcache.UpdateParametersRequest): request = cloud_memcache.UpdateParametersRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if update_mask is not None: @@ -866,7 +854,6 @@ def delete_instance( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -908,10 +895,8 @@ def delete_instance( # there are no flattened fields. if not isinstance(request, cloud_memcache.DeleteInstanceRequest): request = cloud_memcache.DeleteInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -932,7 +917,7 @@ def delete_instance( response = operation.from_gapic( response, self._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=cloud_memcache.OperationMetadata, ) @@ -984,7 +969,6 @@ def apply_parameters( This corresponds to the ``apply_all`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1015,10 +999,8 @@ def apply_parameters( # there are no flattened fields. if not isinstance(request, cloud_memcache.ApplyParametersRequest): request = cloud_memcache.ApplyParametersRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if name is not None: request.name = name if node_ids is not None: diff --git a/google/cloud/memcache_v1/services/cloud_memcache/pagers.py b/google/cloud/memcache_v1/services/cloud_memcache/pagers.py index 7a1324e..7723778 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/pagers.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -117,7 +115,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py index 38122c6..32ad848 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py index c9b57c7..393f376 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,21 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.api_core import operations_v1 # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.memcache_v1.types import cloud_memcache -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -37,27 +36,41 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except 
pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class CloudMemcacheTransport(abc.ABC): """Abstract transport class for CloudMemcache.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + DEFAULT_HOST: str = "memcache.googleapis.com" + def __init__( self, *, - host: str = "memcache.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -66,7 +79,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -80,29 +93,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. 
if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. 
+ + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -137,11 +197,11 @@ def operations_client(self) -> operations_v1.OperationsClient: @property def list_instances( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.ListInstancesRequest], - typing.Union[ + Union[ cloud_memcache.ListInstancesResponse, - typing.Awaitable[cloud_memcache.ListInstancesResponse], + Awaitable[cloud_memcache.ListInstancesResponse], ], ]: raise NotImplementedError() @@ -149,56 +209,54 @@ def list_instances( @property def get_instance( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.GetInstanceRequest], - typing.Union[ - cloud_memcache.Instance, typing.Awaitable[cloud_memcache.Instance] - ], + Union[cloud_memcache.Instance, Awaitable[cloud_memcache.Instance]], ]: raise NotImplementedError() @property def create_instance( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.CreateInstanceRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def update_instance( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.UpdateInstanceRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def update_parameters( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.UpdateParametersRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def delete_instance( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.DeleteInstanceRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def 
apply_parameters( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.ApplyParametersRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py index 61c7dde..5467640 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,22 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import operations_v1 # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.memcache_v1.types import cloud_memcache -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore from .base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO @@ -70,7 +67,7 @@ def __init__( self, *, host: str = "memcache.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: 
grpc.Channel = None, @@ -84,7 +81,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -195,7 +193,7 @@ def __init__( def create_channel( cls, host: str = "memcache.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -226,13 +224,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -313,7 +313,7 @@ def get_instance( @property def create_instance( self, - ) -> Callable[[cloud_memcache.CreateInstanceRequest], operations.Operation]: + ) -> Callable[[cloud_memcache.CreateInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the create instance method over gRPC. Creates a new Instance in a given location. 
@@ -332,14 +332,14 @@ def create_instance( self._stubs["create_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/CreateInstance", request_serializer=cloud_memcache.CreateInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_instance"] @property def update_instance( self, - ) -> Callable[[cloud_memcache.UpdateInstanceRequest], operations.Operation]: + ) -> Callable[[cloud_memcache.UpdateInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the update instance method over gRPC. Updates an existing Instance in a given project and @@ -359,14 +359,14 @@ def update_instance( self._stubs["update_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/UpdateInstance", request_serializer=cloud_memcache.UpdateInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_instance"] @property def update_parameters( self, - ) -> Callable[[cloud_memcache.UpdateParametersRequest], operations.Operation]: + ) -> Callable[[cloud_memcache.UpdateParametersRequest], operations_pb2.Operation]: r"""Return a callable for the update parameters method over gRPC. 
Updates the defined Memcached Parameters for an @@ -388,14 +388,14 @@ def update_parameters( self._stubs["update_parameters"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/UpdateParameters", request_serializer=cloud_memcache.UpdateParametersRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_parameters"] @property def delete_instance( self, - ) -> Callable[[cloud_memcache.DeleteInstanceRequest], operations.Operation]: + ) -> Callable[[cloud_memcache.DeleteInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the delete instance method over gRPC. Deletes a single Instance. @@ -414,14 +414,14 @@ def delete_instance( self._stubs["delete_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/DeleteInstance", request_serializer=cloud_memcache.DeleteInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["delete_instance"] @property def apply_parameters( self, - ) -> Callable[[cloud_memcache.ApplyParametersRequest], operations.Operation]: + ) -> Callable[[cloud_memcache.ApplyParametersRequest], operations_pb2.Operation]: r"""Return a callable for the apply parameters method over gRPC. 
ApplyParameters will restart the set of specified @@ -442,7 +442,7 @@ def apply_parameters( self._stubs["apply_parameters"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/ApplyParameters", request_serializer=cloud_memcache.ApplyParametersRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["apply_parameters"] diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py index d669536..b21cfd3 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,23 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore from google.api_core import operations_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.memcache_v1.types import cloud_memcache -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore from .base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO from .grpc import CloudMemcacheGrpcTransport @@ -73,7 +70,7 @@ class CloudMemcacheGrpcAsyncIOTransport(CloudMemcacheTransport): def create_channel( cls, host: str = "memcache.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -100,13 +97,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -114,7 +113,7 @@ def __init__( self, *, host: str = "memcache.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -128,7 +127,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -187,7 +187,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -323,7 +322,7 @@ def get_instance( def create_instance( self, ) -> Callable[ - [cloud_memcache.CreateInstanceRequest], Awaitable[operations.Operation] + [cloud_memcache.CreateInstanceRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the create instance method over gRPC. 
@@ -343,7 +342,7 @@ def create_instance( self._stubs["create_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/CreateInstance", request_serializer=cloud_memcache.CreateInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_instance"] @@ -351,7 +350,7 @@ def create_instance( def update_instance( self, ) -> Callable[ - [cloud_memcache.UpdateInstanceRequest], Awaitable[operations.Operation] + [cloud_memcache.UpdateInstanceRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the update instance method over gRPC. @@ -372,7 +371,7 @@ def update_instance( self._stubs["update_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/UpdateInstance", request_serializer=cloud_memcache.UpdateInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_instance"] @@ -380,7 +379,7 @@ def update_instance( def update_parameters( self, ) -> Callable[ - [cloud_memcache.UpdateParametersRequest], Awaitable[operations.Operation] + [cloud_memcache.UpdateParametersRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the update parameters method over gRPC. 
@@ -403,7 +402,7 @@ def update_parameters( self._stubs["update_parameters"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/UpdateParameters", request_serializer=cloud_memcache.UpdateParametersRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_parameters"] @@ -411,7 +410,7 @@ def update_parameters( def delete_instance( self, ) -> Callable[ - [cloud_memcache.DeleteInstanceRequest], Awaitable[operations.Operation] + [cloud_memcache.DeleteInstanceRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the delete instance method over gRPC. @@ -431,7 +430,7 @@ def delete_instance( self._stubs["delete_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/DeleteInstance", request_serializer=cloud_memcache.DeleteInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["delete_instance"] @@ -439,7 +438,7 @@ def delete_instance( def apply_parameters( self, ) -> Callable[ - [cloud_memcache.ApplyParametersRequest], Awaitable[operations.Operation] + [cloud_memcache.ApplyParametersRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the apply parameters method over gRPC. 
@@ -461,7 +460,7 @@ def apply_parameters( self._stubs["apply_parameters"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/ApplyParameters", request_serializer=cloud_memcache.ApplyParametersRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["apply_parameters"] diff --git a/google/cloud/memcache_v1/types/__init__.py b/google/cloud/memcache_v1/types/__init__.py index 29acce6..2430991 100644 --- a/google/cloud/memcache_v1/types/__init__.py +++ b/google/cloud/memcache_v1/types/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .cloud_memcache import ( ApplyParametersRequest, CreateInstanceRequest, diff --git a/google/cloud/memcache_v1/types/cloud_memcache.py b/google/cloud/memcache_v1/types/cloud_memcache.py index a148657..2a6777e 100644 --- a/google/cloud/memcache_v1/types/cloud_memcache.py +++ b/google/cloud/memcache_v1/types/cloud_memcache.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,12 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -49,7 +46,6 @@ class MemcacheVersion(proto.Enum): class Instance(proto.Message): r""" - Attributes: name (str): Required. 
Unique name of the resource in this scope @@ -133,7 +129,6 @@ class State(proto.Enum): class NodeConfig(proto.Message): r"""Configuration for a Memcached Node. - Attributes: cpu_count (int): Required. Number of cpus per Memcached node. @@ -142,13 +137,11 @@ class NodeConfig(proto.Message): Memcached node. """ - cpu_count = proto.Field(proto.INT32, number=1) - - memory_size_mb = proto.Field(proto.INT32, number=2) + cpu_count = proto.Field(proto.INT32, number=1,) + memory_size_mb = proto.Field(proto.INT32, number=2,) class Node(proto.Message): r""" - Attributes: node_id (str): Output only. Identifier of the Memcached @@ -180,21 +173,15 @@ class State(proto.Enum): DELETING = 3 UPDATING = 4 - node_id = proto.Field(proto.STRING, number=1) - - zone = proto.Field(proto.STRING, number=2) - + node_id = proto.Field(proto.STRING, number=1,) + zone = proto.Field(proto.STRING, number=2,) state = proto.Field(proto.ENUM, number=3, enum="Instance.Node.State",) - - host = proto.Field(proto.STRING, number=4) - - port = proto.Field(proto.INT32, number=5) - + host = proto.Field(proto.STRING, number=4,) + port = proto.Field(proto.INT32, number=5,) parameters = proto.Field(proto.MESSAGE, number=6, message="MemcacheParameters",) class InstanceMessage(proto.Message): r""" - Attributes: code (google.cloud.memcache_v1.types.Instance.InstanceMessage.Code): A code that correspond to one type of user- @@ -210,42 +197,30 @@ class Code(proto.Enum): ZONE_DISTRIBUTION_UNBALANCED = 1 code = proto.Field(proto.ENUM, number=1, enum="Instance.InstanceMessage.Code",) - - message = proto.Field(proto.STRING, number=2) - - name = proto.Field(proto.STRING, number=1) - - display_name = proto.Field(proto.STRING, number=2) - - labels = proto.MapField(proto.STRING, proto.STRING, number=3) - - authorized_network = proto.Field(proto.STRING, number=4) - - zones = proto.RepeatedField(proto.STRING, number=5) - - node_count = proto.Field(proto.INT32, number=6) - + message = proto.Field(proto.STRING, number=2,) + + 
name = proto.Field(proto.STRING, number=1,) + display_name = proto.Field(proto.STRING, number=2,) + labels = proto.MapField(proto.STRING, proto.STRING, number=3,) + authorized_network = proto.Field(proto.STRING, number=4,) + zones = proto.RepeatedField(proto.STRING, number=5,) + node_count = proto.Field(proto.INT32, number=6,) node_config = proto.Field(proto.MESSAGE, number=7, message=NodeConfig,) - memcache_version = proto.Field(proto.ENUM, number=9, enum="MemcacheVersion",) - parameters = proto.Field(proto.MESSAGE, number=11, message="MemcacheParameters",) - memcache_nodes = proto.RepeatedField(proto.MESSAGE, number=12, message=Node,) - - create_time = proto.Field(proto.MESSAGE, number=13, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=14, message=timestamp.Timestamp,) - + create_time = proto.Field( + proto.MESSAGE, number=13, message=timestamp_pb2.Timestamp, + ) + update_time = proto.Field( + proto.MESSAGE, number=14, message=timestamp_pb2.Timestamp, + ) state = proto.Field(proto.ENUM, number=15, enum=State,) - - memcache_full_version = proto.Field(proto.STRING, number=18) - + memcache_full_version = proto.Field(proto.STRING, number=18,) instance_messages = proto.RepeatedField( proto.MESSAGE, number=19, message=InstanceMessage, ) - - discovery_endpoint = proto.Field(proto.STRING, number=20) + discovery_endpoint = proto.Field(proto.STRING, number=20,) class ListInstancesRequest(proto.Message): @@ -278,15 +253,11 @@ class ListInstancesRequest(proto.Message): "name desc" or "" (unsorted). 
""" - parent = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) - - filter = proto.Field(proto.STRING, number=4) - - order_by = proto.Field(proto.STRING, number=5) + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + filter = proto.Field(proto.STRING, number=4,) + order_by = proto.Field(proto.STRING, number=5,) class ListInstancesResponse(proto.Message): @@ -314,10 +285,8 @@ def raw_page(self): return self instances = proto.RepeatedField(proto.MESSAGE, number=1, message="Instance",) - - next_page_token = proto.Field(proto.STRING, number=2) - - unreachable = proto.RepeatedField(proto.STRING, number=3) + next_page_token = proto.Field(proto.STRING, number=2,) + unreachable = proto.RepeatedField(proto.STRING, number=3,) class GetInstanceRequest(proto.Message): @@ -331,7 +300,7 @@ class GetInstanceRequest(proto.Message): where ``location_id`` refers to a GCP region """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class CreateInstanceRequest(proto.Message): @@ -360,10 +329,8 @@ class CreateInstanceRequest(proto.Message): Required. A Memcached Instance """ - parent = proto.Field(proto.STRING, number=1) - - instance_id = proto.Field(proto.STRING, number=2) - + parent = proto.Field(proto.STRING, number=1,) + instance_id = proto.Field(proto.STRING, number=2,) instance = proto.Field(proto.MESSAGE, number=3, message="Instance",) @@ -381,8 +348,9 @@ class UpdateInstanceRequest(proto.Message): update_mask are updated. 
""" - update_mask = proto.Field(proto.MESSAGE, number=1, message=field_mask.FieldMask,) - + update_mask = proto.Field( + proto.MESSAGE, number=1, message=field_mask_pb2.FieldMask, + ) instance = proto.Field(proto.MESSAGE, number=2, message="Instance",) @@ -397,7 +365,7 @@ class DeleteInstanceRequest(proto.Message): where ``location_id`` refers to a GCP region """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class ApplyParametersRequest(proto.Message): @@ -420,11 +388,9 @@ class ApplyParametersRequest(proto.Message): nodes within the instance. """ - name = proto.Field(proto.STRING, number=1) - - node_ids = proto.RepeatedField(proto.STRING, number=2) - - apply_all = proto.Field(proto.BOOL, number=3) + name = proto.Field(proto.STRING, number=1,) + node_ids = proto.RepeatedField(proto.STRING, number=2,) + apply_all = proto.Field(proto.BOOL, number=3,) class UpdateParametersRequest(proto.Message): @@ -442,16 +408,15 @@ class UpdateParametersRequest(proto.Message): The parameters to apply to the instance. """ - name = proto.Field(proto.STRING, number=1) - - update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) - + name = proto.Field(proto.STRING, number=1,) + update_mask = proto.Field( + proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, + ) parameters = proto.Field(proto.MESSAGE, number=3, message="MemcacheParameters",) class MemcacheParameters(proto.Message): r""" - Attributes: id (str): Output only. The unique ID associated with @@ -465,14 +430,12 @@ class MemcacheParameters(proto.Message): memcached process. """ - id = proto.Field(proto.STRING, number=1) - - params = proto.MapField(proto.STRING, proto.STRING, number=3) + id = proto.Field(proto.STRING, number=1,) + params = proto.MapField(proto.STRING, proto.STRING, number=3,) class OperationMetadata(proto.Message): r"""Represents the metadata of a long-running operation. 
- Attributes: create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Time when the operation was @@ -500,19 +463,13 @@ class OperationMetadata(proto.Message): operation. """ - create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) - - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - - target = proto.Field(proto.STRING, number=3) - - verb = proto.Field(proto.STRING, number=4) - - status_detail = proto.Field(proto.STRING, number=5) - - cancel_requested = proto.Field(proto.BOOL, number=6) - - api_version = proto.Field(proto.STRING, number=7) + create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) + target = proto.Field(proto.STRING, number=3,) + verb = proto.Field(proto.STRING, number=4,) + status_detail = proto.Field(proto.STRING, number=5,) + cancel_requested = proto.Field(proto.BOOL, number=6,) + api_version = proto.Field(proto.STRING, number=7,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/memcache_v1beta2/__init__.py b/google/cloud/memcache_v1beta2/__init__.py index bac2393..54fa8a3 100644 --- a/google/cloud/memcache_v1beta2/__init__.py +++ b/google/cloud/memcache_v1beta2/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -16,6 +15,8 @@ # from .services.cloud_memcache import CloudMemcacheClient +from .services.cloud_memcache import CloudMemcacheAsyncClient + from .types.cloud_memcache import ApplyParametersRequest from .types.cloud_memcache import ApplySoftwareUpdateRequest from .types.cloud_memcache import CreateInstanceRequest @@ -26,16 +27,17 @@ from .types.cloud_memcache import ListInstancesResponse from .types.cloud_memcache import LocationMetadata from .types.cloud_memcache import MemcacheParameters -from .types.cloud_memcache import 
MemcacheVersion from .types.cloud_memcache import OperationMetadata from .types.cloud_memcache import UpdateInstanceRequest from .types.cloud_memcache import UpdateParametersRequest from .types.cloud_memcache import ZoneMetadata - +from .types.cloud_memcache import MemcacheVersion __all__ = ( + "CloudMemcacheAsyncClient", "ApplyParametersRequest", "ApplySoftwareUpdateRequest", + "CloudMemcacheClient", "CreateInstanceRequest", "DeleteInstanceRequest", "GetInstanceRequest", @@ -49,5 +51,4 @@ "UpdateInstanceRequest", "UpdateParametersRequest", "ZoneMetadata", - "CloudMemcacheClient", ) diff --git a/google/cloud/memcache_v1beta2/gapic_metadata.json b/google/cloud/memcache_v1beta2/gapic_metadata.json new file mode 100644 index 0000000..288ef89 --- /dev/null +++ b/google/cloud/memcache_v1beta2/gapic_metadata.json @@ -0,0 +1,103 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.memcache_v1beta2", + "protoPackage": "google.cloud.memcache.v1beta2", + "schema": "1.0", + "services": { + "CloudMemcache": { + "clients": { + "grpc": { + "libraryClient": "CloudMemcacheClient", + "rpcs": { + "ApplyParameters": { + "methods": [ + "apply_parameters" + ] + }, + "ApplySoftwareUpdate": { + "methods": [ + "apply_software_update" + ] + }, + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + }, + "UpdateParameters": { + "methods": [ + "update_parameters" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CloudMemcacheAsyncClient", + "rpcs": { + "ApplyParameters": { + "methods": [ + "apply_parameters" + ] + }, + "ApplySoftwareUpdate": { + "methods": [ + "apply_software_update" + ] + }, + "CreateInstance": { + 
"methods": [ + "create_instance" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + }, + "UpdateParameters": { + "methods": [ + "update_parameters" + ] + } + } + } + } + } + } +} diff --git a/google/cloud/memcache_v1beta2/services/__init__.py b/google/cloud/memcache_v1beta2/services/__init__.py index 42ffdf2..4de6597 100644 --- a/google/cloud/memcache_v1beta2/services/__init__.py +++ b/google/cloud/memcache_v1beta2/services/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py index 8524cb4..efb245e 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from .client import CloudMemcacheClient from .async_client import CloudMemcacheAsyncClient diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py index b687488..4417473 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict import functools import re @@ -22,20 +20,19 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.memcache_v1beta2.services.cloud_memcache import pagers from google.cloud.memcache_v1beta2.types import cloud_memcache -from google.protobuf import empty_pb2 as empty # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO from 
.transports.grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport from .client import CloudMemcacheClient @@ -70,31 +67,26 @@ class CloudMemcacheAsyncClient: instance_path = staticmethod(CloudMemcacheClient.instance_path) parse_instance_path = staticmethod(CloudMemcacheClient.parse_instance_path) - common_billing_account_path = staticmethod( CloudMemcacheClient.common_billing_account_path ) parse_common_billing_account_path = staticmethod( CloudMemcacheClient.parse_common_billing_account_path ) - common_folder_path = staticmethod(CloudMemcacheClient.common_folder_path) parse_common_folder_path = staticmethod( CloudMemcacheClient.parse_common_folder_path ) - common_organization_path = staticmethod( CloudMemcacheClient.common_organization_path ) parse_common_organization_path = staticmethod( CloudMemcacheClient.parse_common_organization_path ) - common_project_path = staticmethod(CloudMemcacheClient.common_project_path) parse_common_project_path = staticmethod( CloudMemcacheClient.parse_common_project_path ) - common_location_path = staticmethod(CloudMemcacheClient.common_location_path) parse_common_location_path = staticmethod( CloudMemcacheClient.parse_common_location_path @@ -102,7 +94,8 @@ class CloudMemcacheAsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -117,7 +110,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. 
Args: filename (str): The path to the service account private key json @@ -134,7 +127,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> CloudMemcacheTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: CloudMemcacheTransport: The transport used by the client instance. @@ -148,12 +141,12 @@ def transport(self) -> CloudMemcacheTransport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, CloudMemcacheTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the cloud memcache client. + """Instantiates the cloud memcache client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -185,7 +178,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = CloudMemcacheClient( credentials=credentials, transport=transport, @@ -217,7 +209,6 @@ async def list_instances( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -247,7 +238,6 @@ async def list_instances( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -301,7 +291,6 @@ async def get_instance( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -326,7 +315,6 @@ async def get_instance( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -398,7 +386,6 @@ async def create_instance( This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -428,7 +415,6 @@ async def create_instance( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if instance_id is not None: @@ -468,7 +454,7 @@ async def update_instance( self, request: cloud_memcache.UpdateInstanceRequest = None, *, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, resource: cloud_memcache.Instance = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -496,7 +482,6 @@ async def update_instance( This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -526,7 +511,6 @@ async def update_instance( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if update_mask is not None: request.update_mask = update_mask if resource is not None: @@ -567,7 +551,7 @@ async def update_parameters( request: cloud_memcache.UpdateParametersRequest = None, *, name: str = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, parameters: cloud_memcache.MemcacheParameters = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -602,7 +586,6 @@ async def update_parameters( This corresponds to the ``parameters`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -632,7 +615,6 @@ async def update_parameters( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if update_mask is not None: @@ -692,7 +674,6 @@ async def delete_instance( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -732,7 +713,6 @@ async def delete_instance( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -757,7 +737,7 @@ async def delete_instance( response = operation_async.from_gapic( response, self._client._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=cloud_memcache.OperationMetadata, ) @@ -807,7 +787,6 @@ async def apply_parameters( This corresponds to the ``apply_all`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -837,12 +816,10 @@ async def apply_parameters( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if apply_all is not None: request.apply_all = apply_all - if node_ids: request.node_ids.extend(node_ids) @@ -919,7 +896,6 @@ async def apply_software_update( This corresponds to the ``apply_all`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -949,12 +925,10 @@ async def apply_software_update( # If we have keyword arguments corresponding to fields on the # request, apply these. - if instance is not None: request.instance = instance if apply_all is not None: request.apply_all = apply_all - if node_ids: request.node_ids.extend(node_ids) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index 8d43719..65a2bc3 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict from distutils import util import os @@ -23,10 +21,10 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore @@ -36,10 +34,9 @@ from google.api_core import operation_async # type: ignore from google.cloud.memcache_v1beta2.services.cloud_memcache import pagers from google.cloud.memcache_v1beta2.types import cloud_memcache -from google.protobuf import empty_pb2 as empty # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO from .transports.grpc import CloudMemcacheGrpcTransport from .transports.grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport @@ -58,7 +55,7 @@ class CloudMemcacheClientMeta(type): _transport_registry["grpc_asyncio"] = CloudMemcacheGrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[CloudMemcacheTransport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. 
If none is @@ -100,7 +97,8 @@ class CloudMemcacheClient(metaclass=CloudMemcacheClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -134,7 +132,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -151,7 +150,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -170,23 +169,24 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> CloudMemcacheTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - CloudMemcacheTransport: The transport used by the client instance. + CloudMemcacheTransport: The transport used by the client + instance. 
""" return self._transport @staticmethod def instance_path(project: str, location: str, instance: str,) -> str: - """Return a fully-qualified instance string.""" + """Returns a fully-qualified instance string.""" return "projects/{project}/locations/{location}/instances/{instance}".format( project=project, location=location, instance=instance, ) @staticmethod def parse_instance_path(path: str) -> Dict[str, str]: - """Parse a instance path into its component segments.""" + """Parses a instance path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$", path, @@ -195,7 +195,7 @@ def parse_instance_path(path: str) -> Dict[str, str]: @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -208,7 +208,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -219,7 +219,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -230,7 +230,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -241,7 +241,7 @@ def parse_common_project_path(path: str) -> 
Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -255,12 +255,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, CloudMemcacheTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the cloud memcache client. + """Instantiates the cloud memcache client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -315,9 +315,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -329,12 +330,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. 
@@ -349,8 +352,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -389,7 +392,6 @@ def list_instances( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -421,10 +423,8 @@ def list_instances( # there are no flattened fields. if not isinstance(request, cloud_memcache.ListInstancesRequest): request = cloud_memcache.ListInstancesRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -474,7 +474,6 @@ def get_instance( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -501,10 +500,8 @@ def get_instance( # there are no flattened fields. if not isinstance(request, cloud_memcache.GetInstanceRequest): request = cloud_memcache.GetInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -572,7 +569,6 @@ def create_instance( This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -604,10 +600,8 @@ def create_instance( # there are no flattened fields. 
if not isinstance(request, cloud_memcache.CreateInstanceRequest): request = cloud_memcache.CreateInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if instance_id is not None: @@ -643,7 +637,7 @@ def update_instance( self, request: cloud_memcache.UpdateInstanceRequest = None, *, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, resource: cloud_memcache.Instance = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -671,7 +665,6 @@ def update_instance( This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -703,10 +696,8 @@ def update_instance( # there are no flattened fields. if not isinstance(request, cloud_memcache.UpdateInstanceRequest): request = cloud_memcache.UpdateInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if update_mask is not None: request.update_mask = update_mask if resource is not None: @@ -743,7 +734,7 @@ def update_parameters( request: cloud_memcache.UpdateParametersRequest = None, *, name: str = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, parameters: cloud_memcache.MemcacheParameters = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -778,7 +769,6 @@ def update_parameters( This corresponds to the ``parameters`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -810,10 +800,8 @@ def update_parameters( # there are no flattened fields. 
if not isinstance(request, cloud_memcache.UpdateParametersRequest): request = cloud_memcache.UpdateParametersRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if update_mask is not None: @@ -869,7 +857,6 @@ def delete_instance( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -911,10 +898,8 @@ def delete_instance( # there are no flattened fields. if not isinstance(request, cloud_memcache.DeleteInstanceRequest): request = cloud_memcache.DeleteInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -935,7 +920,7 @@ def delete_instance( response = operation.from_gapic( response, self._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=cloud_memcache.OperationMetadata, ) @@ -985,7 +970,6 @@ def apply_parameters( This corresponds to the ``apply_all`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1017,10 +1001,8 @@ def apply_parameters( # there are no flattened fields. if not isinstance(request, cloud_memcache.ApplyParametersRequest): request = cloud_memcache.ApplyParametersRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if node_ids is not None: @@ -1097,7 +1079,6 @@ def apply_software_update( This corresponds to the ``apply_all`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1129,10 +1110,8 @@ def apply_software_update( # there are no flattened fields. if not isinstance(request, cloud_memcache.ApplySoftwareUpdateRequest): request = cloud_memcache.ApplySoftwareUpdateRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if instance is not None: request.instance = instance if node_ids is not None: diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py index 5b69afd..381459d 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -117,7 +115,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py index 38122c6..32ad848 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py index a1b9e2c..1fb1292 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,21 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.api_core import operations_v1 # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.memcache_v1beta2.types import cloud_memcache -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -37,27 +36,41 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + 
except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class CloudMemcacheTransport(abc.ABC): """Abstract transport class for CloudMemcache.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + DEFAULT_HOST: str = "memcache.googleapis.com" + def __init__( self, *, - host: str = "memcache.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -66,7 +79,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -80,29 +93,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. 
if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. 
+ + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -142,11 +202,11 @@ def operations_client(self) -> operations_v1.OperationsClient: @property def list_instances( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.ListInstancesRequest], - typing.Union[ + Union[ cloud_memcache.ListInstancesResponse, - typing.Awaitable[cloud_memcache.ListInstancesResponse], + Awaitable[cloud_memcache.ListInstancesResponse], ], ]: raise NotImplementedError() @@ -154,65 +214,63 @@ def list_instances( @property def get_instance( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.GetInstanceRequest], - typing.Union[ - cloud_memcache.Instance, typing.Awaitable[cloud_memcache.Instance] - ], + Union[cloud_memcache.Instance, Awaitable[cloud_memcache.Instance]], ]: raise NotImplementedError() @property def create_instance( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.CreateInstanceRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def update_instance( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.UpdateInstanceRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def update_parameters( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.UpdateParametersRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def delete_instance( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.DeleteInstanceRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def 
apply_parameters( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.ApplyParametersRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def apply_software_update( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.ApplySoftwareUpdateRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py index f5f202f..cf61dee 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,22 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import operations_v1 # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.memcache_v1beta2.types import cloud_memcache -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore from .base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO @@ -70,7 +67,7 @@ def __init__( self, *, host: str = "memcache.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -84,7 +81,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -195,7 +193,7 @@ def __init__( def create_channel( cls, host: str = "memcache.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -226,13 +224,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -313,7 +313,7 @@ def get_instance( @property def create_instance( self, - ) -> Callable[[cloud_memcache.CreateInstanceRequest], operations.Operation]: + ) -> Callable[[cloud_memcache.CreateInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the create instance method over gRPC. Creates a new Instance in a given location. @@ -332,14 +332,14 @@ def create_instance( self._stubs["create_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/CreateInstance", request_serializer=cloud_memcache.CreateInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_instance"] @property def update_instance( self, - ) -> Callable[[cloud_memcache.UpdateInstanceRequest], operations.Operation]: + ) -> Callable[[cloud_memcache.UpdateInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the update instance method over gRPC. 
Updates an existing Instance in a given project and @@ -359,14 +359,14 @@ def update_instance( self._stubs["update_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/UpdateInstance", request_serializer=cloud_memcache.UpdateInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_instance"] @property def update_parameters( self, - ) -> Callable[[cloud_memcache.UpdateParametersRequest], operations.Operation]: + ) -> Callable[[cloud_memcache.UpdateParametersRequest], operations_pb2.Operation]: r"""Return a callable for the update parameters method over gRPC. Updates the defined Memcached parameters for an existing @@ -388,14 +388,14 @@ def update_parameters( self._stubs["update_parameters"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/UpdateParameters", request_serializer=cloud_memcache.UpdateParametersRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_parameters"] @property def delete_instance( self, - ) -> Callable[[cloud_memcache.DeleteInstanceRequest], operations.Operation]: + ) -> Callable[[cloud_memcache.DeleteInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the delete instance method over gRPC. Deletes a single Instance. 
@@ -414,14 +414,14 @@ def delete_instance( self._stubs["delete_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/DeleteInstance", request_serializer=cloud_memcache.DeleteInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["delete_instance"] @property def apply_parameters( self, - ) -> Callable[[cloud_memcache.ApplyParametersRequest], operations.Operation]: + ) -> Callable[[cloud_memcache.ApplyParametersRequest], operations_pb2.Operation]: r"""Return a callable for the apply parameters method over gRPC. ``ApplyParameters`` restarts the set of specified nodes in order @@ -442,14 +442,16 @@ def apply_parameters( self._stubs["apply_parameters"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/ApplyParameters", request_serializer=cloud_memcache.ApplyParametersRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["apply_parameters"] @property def apply_software_update( self, - ) -> Callable[[cloud_memcache.ApplySoftwareUpdateRequest], operations.Operation]: + ) -> Callable[ + [cloud_memcache.ApplySoftwareUpdateRequest], operations_pb2.Operation + ]: r"""Return a callable for the apply software update method over gRPC. 
Updates software on the selected nodes of the @@ -469,7 +471,7 @@ def apply_software_update( self._stubs["apply_software_update"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/ApplySoftwareUpdate", request_serializer=cloud_memcache.ApplySoftwareUpdateRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["apply_software_update"] diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py index f2452b4..e5a9f15 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,23 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore from google.api_core import operations_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.memcache_v1beta2.types import cloud_memcache -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore from .base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO from .grpc import CloudMemcacheGrpcTransport @@ -73,7 +70,7 @@ class CloudMemcacheGrpcAsyncIOTransport(CloudMemcacheTransport): def create_channel( cls, host: str = "memcache.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -100,13 +97,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -114,7 +113,7 @@ def __init__( self, *, host: str = "memcache.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -128,7 +127,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -187,7 +187,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -323,7 +322,7 @@ def get_instance( def create_instance( self, ) -> Callable[ - [cloud_memcache.CreateInstanceRequest], Awaitable[operations.Operation] + [cloud_memcache.CreateInstanceRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the create instance method over gRPC. 
@@ -343,7 +342,7 @@ def create_instance( self._stubs["create_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/CreateInstance", request_serializer=cloud_memcache.CreateInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_instance"] @@ -351,7 +350,7 @@ def create_instance( def update_instance( self, ) -> Callable[ - [cloud_memcache.UpdateInstanceRequest], Awaitable[operations.Operation] + [cloud_memcache.UpdateInstanceRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the update instance method over gRPC. @@ -372,7 +371,7 @@ def update_instance( self._stubs["update_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/UpdateInstance", request_serializer=cloud_memcache.UpdateInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_instance"] @@ -380,7 +379,7 @@ def update_instance( def update_parameters( self, ) -> Callable[ - [cloud_memcache.UpdateParametersRequest], Awaitable[operations.Operation] + [cloud_memcache.UpdateParametersRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the update parameters method over gRPC. 
@@ -403,7 +402,7 @@ def update_parameters( self._stubs["update_parameters"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/UpdateParameters", request_serializer=cloud_memcache.UpdateParametersRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_parameters"] @@ -411,7 +410,7 @@ def update_parameters( def delete_instance( self, ) -> Callable[ - [cloud_memcache.DeleteInstanceRequest], Awaitable[operations.Operation] + [cloud_memcache.DeleteInstanceRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the delete instance method over gRPC. @@ -431,7 +430,7 @@ def delete_instance( self._stubs["delete_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/DeleteInstance", request_serializer=cloud_memcache.DeleteInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["delete_instance"] @@ -439,7 +438,7 @@ def delete_instance( def apply_parameters( self, ) -> Callable[ - [cloud_memcache.ApplyParametersRequest], Awaitable[operations.Operation] + [cloud_memcache.ApplyParametersRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the apply parameters method over gRPC. 
@@ -461,7 +460,7 @@ def apply_parameters( self._stubs["apply_parameters"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/ApplyParameters", request_serializer=cloud_memcache.ApplyParametersRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["apply_parameters"] @@ -469,7 +468,7 @@ def apply_parameters( def apply_software_update( self, ) -> Callable[ - [cloud_memcache.ApplySoftwareUpdateRequest], Awaitable[operations.Operation] + [cloud_memcache.ApplySoftwareUpdateRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the apply software update method over gRPC. @@ -490,7 +489,7 @@ def apply_software_update( self._stubs["apply_software_update"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/ApplySoftwareUpdate", request_serializer=cloud_memcache.ApplySoftwareUpdateRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["apply_software_update"] diff --git a/google/cloud/memcache_v1beta2/types/__init__.py b/google/cloud/memcache_v1beta2/types/__init__.py index 90cf3eb..a4e788a 100644 --- a/google/cloud/memcache_v1beta2/types/__init__.py +++ b/google/cloud/memcache_v1beta2/types/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from .cloud_memcache import ( ApplyParametersRequest, ApplySoftwareUpdateRequest, diff --git a/google/cloud/memcache_v1beta2/types/cloud_memcache.py b/google/cloud/memcache_v1beta2/types/cloud_memcache.py index b7e9a43..6fdd3b8 100644 --- a/google/cloud/memcache_v1beta2/types/cloud_memcache.py +++ b/google/cloud/memcache_v1beta2/types/cloud_memcache.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,12 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -52,7 +49,6 @@ class MemcacheVersion(proto.Enum): class Instance(proto.Message): r"""A Memorystore for Memcached instance - Attributes: name (str): Required. Unique name of the resource in this scope @@ -142,7 +138,6 @@ class State(proto.Enum): class NodeConfig(proto.Message): r"""Configuration for a Memcached Node. - Attributes: cpu_count (int): Required. Number of cpus per Memcached node. @@ -151,13 +146,11 @@ class NodeConfig(proto.Message): Memcached node. """ - cpu_count = proto.Field(proto.INT32, number=1) - - memory_size_mb = proto.Field(proto.INT32, number=2) + cpu_count = proto.Field(proto.INT32, number=1,) + memory_size_mb = proto.Field(proto.INT32, number=2,) class Node(proto.Message): r""" - Attributes: node_id (str): Output only. 
Identifier of the Memcached @@ -192,23 +185,16 @@ class State(proto.Enum): DELETING = 3 UPDATING = 4 - node_id = proto.Field(proto.STRING, number=1) - - zone = proto.Field(proto.STRING, number=2) - + node_id = proto.Field(proto.STRING, number=1,) + zone = proto.Field(proto.STRING, number=2,) state = proto.Field(proto.ENUM, number=3, enum="Instance.Node.State",) - - host = proto.Field(proto.STRING, number=4) - - port = proto.Field(proto.INT32, number=5) - + host = proto.Field(proto.STRING, number=4,) + port = proto.Field(proto.INT32, number=5,) parameters = proto.Field(proto.MESSAGE, number=6, message="MemcacheParameters",) - - update_available = proto.Field(proto.BOOL, number=7) + update_available = proto.Field(proto.BOOL, number=7,) class InstanceMessage(proto.Message): r""" - Attributes: code (google.cloud.memcache_v1beta2.types.Instance.InstanceMessage.Code): A code that correspond to one type of user- @@ -224,44 +210,31 @@ class Code(proto.Enum): ZONE_DISTRIBUTION_UNBALANCED = 1 code = proto.Field(proto.ENUM, number=1, enum="Instance.InstanceMessage.Code",) - - message = proto.Field(proto.STRING, number=2) - - name = proto.Field(proto.STRING, number=1) - - display_name = proto.Field(proto.STRING, number=2) - - labels = proto.MapField(proto.STRING, proto.STRING, number=3) - - authorized_network = proto.Field(proto.STRING, number=4) - - zones = proto.RepeatedField(proto.STRING, number=5) - - node_count = proto.Field(proto.INT32, number=6) - + message = proto.Field(proto.STRING, number=2,) + + name = proto.Field(proto.STRING, number=1,) + display_name = proto.Field(proto.STRING, number=2,) + labels = proto.MapField(proto.STRING, proto.STRING, number=3,) + authorized_network = proto.Field(proto.STRING, number=4,) + zones = proto.RepeatedField(proto.STRING, number=5,) + node_count = proto.Field(proto.INT32, number=6,) node_config = proto.Field(proto.MESSAGE, number=7, message=NodeConfig,) - memcache_version = proto.Field(proto.ENUM, number=9, 
enum="MemcacheVersion",) - parameters = proto.Field(proto.MESSAGE, number=11, message="MemcacheParameters",) - memcache_nodes = proto.RepeatedField(proto.MESSAGE, number=12, message=Node,) - - create_time = proto.Field(proto.MESSAGE, number=13, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=14, message=timestamp.Timestamp,) - + create_time = proto.Field( + proto.MESSAGE, number=13, message=timestamp_pb2.Timestamp, + ) + update_time = proto.Field( + proto.MESSAGE, number=14, message=timestamp_pb2.Timestamp, + ) state = proto.Field(proto.ENUM, number=15, enum=State,) - - memcache_full_version = proto.Field(proto.STRING, number=18) - + memcache_full_version = proto.Field(proto.STRING, number=18,) instance_messages = proto.RepeatedField( proto.MESSAGE, number=19, message=InstanceMessage, ) - - discovery_endpoint = proto.Field(proto.STRING, number=20) - - update_available = proto.Field(proto.BOOL, number=21) + discovery_endpoint = proto.Field(proto.STRING, number=20,) + update_available = proto.Field(proto.BOOL, number=21,) class ListInstancesRequest(proto.Message): @@ -294,15 +267,11 @@ class ListInstancesRequest(proto.Message): "name desc" or "" (unsorted). 
""" - parent = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) - - filter = proto.Field(proto.STRING, number=4) - - order_by = proto.Field(proto.STRING, number=5) + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + filter = proto.Field(proto.STRING, number=4,) + order_by = proto.Field(proto.STRING, number=5,) class ListInstancesResponse(proto.Message): @@ -330,10 +299,8 @@ def raw_page(self): return self resources = proto.RepeatedField(proto.MESSAGE, number=1, message="Instance",) - - next_page_token = proto.Field(proto.STRING, number=2) - - unreachable = proto.RepeatedField(proto.STRING, number=3) + next_page_token = proto.Field(proto.STRING, number=2,) + unreachable = proto.RepeatedField(proto.STRING, number=3,) class GetInstanceRequest(proto.Message): @@ -347,7 +314,7 @@ class GetInstanceRequest(proto.Message): where ``location_id`` refers to a GCP region """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class CreateInstanceRequest(proto.Message): @@ -376,10 +343,8 @@ class CreateInstanceRequest(proto.Message): Required. A Memcached [Instance] resource """ - parent = proto.Field(proto.STRING, number=1) - - instance_id = proto.Field(proto.STRING, number=2) - + parent = proto.Field(proto.STRING, number=1,) + instance_id = proto.Field(proto.STRING, number=2,) resource = proto.Field(proto.MESSAGE, number=3, message="Instance",) @@ -397,8 +362,9 @@ class UpdateInstanceRequest(proto.Message): specified in update_mask are updated. 
""" - update_mask = proto.Field(proto.MESSAGE, number=1, message=field_mask.FieldMask,) - + update_mask = proto.Field( + proto.MESSAGE, number=1, message=field_mask_pb2.FieldMask, + ) resource = proto.Field(proto.MESSAGE, number=2, message="Instance",) @@ -413,7 +379,7 @@ class DeleteInstanceRequest(proto.Message): where ``location_id`` refers to a GCP region """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class ApplyParametersRequest(proto.Message): @@ -435,11 +401,9 @@ class ApplyParametersRequest(proto.Message): within the instance. """ - name = proto.Field(proto.STRING, number=1) - - node_ids = proto.RepeatedField(proto.STRING, number=2) - - apply_all = proto.Field(proto.BOOL, number=3) + name = proto.Field(proto.STRING, number=1,) + node_ids = proto.RepeatedField(proto.STRING, number=2,) + apply_all = proto.Field(proto.BOOL, number=3,) class UpdateParametersRequest(proto.Message): @@ -457,10 +421,10 @@ class UpdateParametersRequest(proto.Message): The parameters to apply to the instance. """ - name = proto.Field(proto.STRING, number=1) - - update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) - + name = proto.Field(proto.STRING, number=1,) + update_mask = proto.Field( + proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, + ) parameters = proto.Field(proto.MESSAGE, number=3, message="MemcacheParameters",) @@ -485,11 +449,9 @@ class ApplySoftwareUpdateRequest(proto.Message): instance. """ - instance = proto.Field(proto.STRING, number=1) - - node_ids = proto.RepeatedField(proto.STRING, number=2) - - apply_all = proto.Field(proto.BOOL, number=3) + instance = proto.Field(proto.STRING, number=1,) + node_ids = proto.RepeatedField(proto.STRING, number=2,) + apply_all = proto.Field(proto.BOOL, number=3,) class MemcacheParameters(proto.Message): @@ -507,14 +469,12 @@ class MemcacheParameters(proto.Message): memcached process. 
""" - id = proto.Field(proto.STRING, number=1) - - params = proto.MapField(proto.STRING, proto.STRING, number=3) + id = proto.Field(proto.STRING, number=1,) + params = proto.MapField(proto.STRING, proto.STRING, number=3,) class OperationMetadata(proto.Message): r"""Represents the metadata of a long-running operation. - Attributes: create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Time when the operation was @@ -542,19 +502,13 @@ class OperationMetadata(proto.Message): operation. """ - create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) - - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - - target = proto.Field(proto.STRING, number=3) - - verb = proto.Field(proto.STRING, number=4) - - status_detail = proto.Field(proto.STRING, number=5) - - cancel_requested = proto.Field(proto.BOOL, number=6) - - api_version = proto.Field(proto.STRING, number=7) + create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) + target = proto.Field(proto.STRING, number=3,) + verb = proto.Field(proto.STRING, number=4,) + status_detail = proto.Field(proto.STRING, number=5,) + cancel_requested = proto.Field(proto.BOOL, number=6,) + api_version = proto.Field(proto.STRING, number=7,) class LocationMetadata(proto.Message): @@ -575,7 +529,7 @@ class LocationMetadata(proto.Message): class ZoneMetadata(proto.Message): - r"""""" + r""" """ __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/scripts/fixup_memcache_v1_keywords.py b/scripts/fixup_memcache_v1_keywords.py index eac442a..774b03f 100644 --- a/scripts/fixup_memcache_v1_keywords.py +++ b/scripts/fixup_memcache_v1_keywords.py @@ -1,6 +1,5 @@ #! 
/usr/bin/env python3 # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import argparse import os import libcst as cst @@ -41,14 +39,13 @@ def partition( class memcacheCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'apply_parameters': ('name', 'node_ids', 'apply_all', ), - 'create_instance': ('parent', 'instance_id', 'instance', ), - 'delete_instance': ('name', ), - 'get_instance': ('name', ), - 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'update_instance': ('update_mask', 'instance', ), - 'update_parameters': ('name', 'update_mask', 'parameters', ), - + 'apply_parameters': ('name', 'node_ids', 'apply_all', ), + 'create_instance': ('parent', 'instance_id', 'instance', ), + 'delete_instance': ('name', ), + 'get_instance': ('name', ), + 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'update_instance': ('update_mask', 'instance', ), + 'update_parameters': ('name', 'update_mask', 'parameters', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -79,7 +76,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: value=cst.Dict([ cst.DictElement( cst.SimpleString("'{}'".format(name)), - cst.Element(value=arg.value) +cst.Element(value=arg.value) ) # Note: the args + kwargs looks silly, but keep in mind that # the control parameters had to be stripped out, and that diff --git a/scripts/fixup_memcache_v1beta2_keywords.py b/scripts/fixup_memcache_v1beta2_keywords.py index 4267b6f..afe0f42 100644 --- a/scripts/fixup_memcache_v1beta2_keywords.py +++ b/scripts/fixup_memcache_v1beta2_keywords.py @@ -1,6 +1,5 @@ #! 
/usr/bin/env python3 # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import argparse import os import libcst as cst @@ -41,15 +39,14 @@ def partition( class memcacheCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'apply_parameters': ('name', 'node_ids', 'apply_all', ), - 'apply_software_update': ('instance', 'node_ids', 'apply_all', ), - 'create_instance': ('parent', 'instance_id', 'resource', ), - 'delete_instance': ('name', ), - 'get_instance': ('name', ), - 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'update_instance': ('update_mask', 'resource', ), - 'update_parameters': ('name', 'update_mask', 'parameters', ), - + 'apply_parameters': ('name', 'node_ids', 'apply_all', ), + 'apply_software_update': ('instance', 'node_ids', 'apply_all', ), + 'create_instance': ('parent', 'instance_id', 'resource', ), + 'delete_instance': ('name', ), + 'get_instance': ('name', ), + 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'update_instance': ('update_mask', 'resource', ), + 'update_parameters': ('name', 'update_mask', 'parameters', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -80,7 +77,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: value=cst.Dict([ cst.DictElement( cst.SimpleString("'{}'".format(name)), - cst.Element(value=arg.value) +cst.Element(value=arg.value) ) # Note: the args + kwargs looks silly, but keep in mind that # the control parameters had to be stripped out, and that diff --git a/setup.py b/setup.py index e9985ee..da77bb3 100644 --- a/setup.py +++ b/setup.py @@ -42,6 +42,7 @@ install_requires=( "google-api-core[grpc] >= 1.22.2, 
< 2.0.0dev", "proto-plus >= 1.4.0", + "packaging >= 14.3", ), python_requires=">=3.6", classifiers=[ diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index a929701..71b8e10 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -7,3 +7,5 @@ # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.22.2 proto-plus==1.15.0 +packaging==14.3 +google-auth==1.24.0 # TODO: remove when google-auth >= 1.25.0 is transitively required through google-api-core diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..4de6597 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 0000000..4de6597 --- /dev/null +++ b/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py new file mode 100644 index 0000000..4de6597 --- /dev/null +++ b/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/memcache_v1/__init__.py b/tests/unit/gapic/memcache_v1/__init__.py index 42ffdf2..4de6597 100644 --- a/tests/unit/gapic/memcache_v1/__init__.py +++ b/tests/unit/gapic/memcache_v1/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py index b4793ec..4970a18 100644 --- a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,26 +23,56 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import future from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.memcache_v1.services.cloud_memcache import CloudMemcacheAsyncClient from google.cloud.memcache_v1.services.cloud_memcache import CloudMemcacheClient from google.cloud.memcache_v1.services.cloud_memcache import pagers from google.cloud.memcache_v1.services.cloud_memcache import transports +from google.cloud.memcache_v1.services.cloud_memcache.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.memcache_v1.services.cloud_memcache.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.memcache_v1.types import cloud_memcache from google.longrunning import operations_pb2 from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -94,7 +123,7 @@ def test__get_default_mtls_endpoint(): "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] ) def test_cloud_memcache_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -111,7 +140,7 @@ def test_cloud_memcache_client_from_service_account_info(client_class): "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] ) def test_cloud_memcache_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -164,7 +193,7 @@ def test_cloud_memcache_client_client_options( ): # Check that if channel is provided we won't create a new one. 
with mock.patch.object(CloudMemcacheClient, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -452,7 +481,7 @@ def test_list_instances( transport: str = "grpc", request_type=cloud_memcache.ListInstancesRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -465,21 +494,16 @@ def test_list_instances( call.return_value = cloud_memcache.ListInstancesResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) - response = client.list_instances(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ListInstancesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @@ -491,7 +515,7 @@ def test_list_instances_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -499,7 +523,6 @@ def test_list_instances_empty_call(): client.list_instances() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ListInstancesRequest() @@ -508,7 +531,7 @@ async def test_list_instances_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.ListInstancesRequest ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -524,20 +547,16 @@ async def test_list_instances_async( unreachable=["unreachable_value"], ) ) - response = await client.list_instances(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ListInstancesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListInstancesAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @@ -547,17 +566,17 @@ async def test_list_instances_async_from_dict(): def test_list_instances_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ListInstancesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: call.return_value = cloud_memcache.ListInstancesResponse() - client.list_instances(request) # Establish that the underlying gRPC stub method was called. 
@@ -572,11 +591,14 @@ def test_list_instances_field_headers(): @pytest.mark.asyncio async def test_list_instances_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ListInstancesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -584,7 +606,6 @@ async def test_list_instances_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_memcache.ListInstancesResponse() ) - await client.list_instances(request) # Establish that the underlying gRPC stub method was called. @@ -598,13 +619,12 @@ async def test_list_instances_field_headers_async(): def test_list_instances_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_memcache.ListInstancesResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_instances(parent="parent_value",) @@ -613,12 +633,11 @@ def test_list_instances_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_instances_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -630,7 +649,9 @@ def test_list_instances_flattened_error(): @pytest.mark.asyncio async def test_list_instances_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -648,13 +669,14 @@ async def test_list_instances_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_instances_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -665,7 +687,7 @@ async def test_list_instances_flattened_error_async(): def test_list_instances_pager(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials,) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -703,7 +725,7 @@ def test_list_instances_pager(): def test_list_instances_pages(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials,) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -733,7 +755,7 @@ def test_list_instances_pages(): @pytest.mark.asyncio async def test_list_instances_async_pager(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials,) + client = CloudMemcacheAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -770,7 +792,7 @@ async def test_list_instances_async_pager(): @pytest.mark.asyncio async def test_list_instances_async_pages(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials,) + client = CloudMemcacheAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -806,7 +828,7 @@ def test_get_instance( transport: str = "grpc", request_type=cloud_memcache.GetInstanceRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -827,35 +849,23 @@ def test_get_instance( memcache_full_version="memcache_full_version_value", discovery_endpoint="discovery_endpoint_value", ) - response = client.get_instance(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.GetInstanceRequest() # Establish that the response is the type that we expect. - assert isinstance(response, cloud_memcache.Instance) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.authorized_network == "authorized_network_value" - assert response.zones == ["zones_value"] - assert response.node_count == 1070 - assert response.memcache_version == cloud_memcache.MemcacheVersion.MEMCACHE_1_5 - assert response.state == cloud_memcache.Instance.State.CREATING - assert response.memcache_full_version == "memcache_full_version_value" - assert response.discovery_endpoint == "discovery_endpoint_value" @@ -867,7 +877,7 @@ def test_get_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -875,7 +885,6 @@ def test_get_instance_empty_call(): client.get_instance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.GetInstanceRequest() @@ -884,7 +893,7 @@ async def test_get_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.GetInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -907,34 +916,23 @@ async def test_get_instance_async( discovery_endpoint="discovery_endpoint_value", ) ) - response = await client.get_instance(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.GetInstanceRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloud_memcache.Instance) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.authorized_network == "authorized_network_value" - assert response.zones == ["zones_value"] - assert response.node_count == 1070 - assert response.memcache_version == cloud_memcache.MemcacheVersion.MEMCACHE_1_5 - assert response.state == cloud_memcache.Instance.State.CREATING - assert response.memcache_full_version == "memcache_full_version_value" - assert response.discovery_endpoint == "discovery_endpoint_value" @@ -944,17 +942,17 @@ async def test_get_instance_async_from_dict(): def test_get_instance_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.GetInstanceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_instance), "__call__") as call: call.return_value = cloud_memcache.Instance() - client.get_instance(request) # Establish that the underlying gRPC stub method was called. @@ -969,11 +967,14 @@ def test_get_instance_field_headers(): @pytest.mark.asyncio async def test_get_instance_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = cloud_memcache.GetInstanceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -981,7 +982,6 @@ async def test_get_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_memcache.Instance() ) - await client.get_instance(request) # Establish that the underlying gRPC stub method was called. @@ -995,13 +995,12 @@ async def test_get_instance_field_headers_async(): def test_get_instance_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_memcache.Instance() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_instance(name="name_value",) @@ -1010,12 +1009,11 @@ def test_get_instance_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_instance_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1027,7 +1025,9 @@ def test_get_instance_flattened_error(): @pytest.mark.asyncio async def test_get_instance_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_instance), "__call__") as call: @@ -1045,13 +1045,14 @@ async def test_get_instance_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1065,7 +1066,7 @@ def test_create_instance( transport: str = "grpc", request_type=cloud_memcache.CreateInstanceRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1076,13 +1077,11 @@ def test_create_instance( with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.CreateInstanceRequest() # Establish that the response is the type that we expect. @@ -1097,7 +1096,7 @@ def test_create_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1105,7 +1104,6 @@ def test_create_instance_empty_call(): client.create_instance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.CreateInstanceRequest() @@ -1114,7 +1112,7 @@ async def test_create_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.CreateInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1127,13 +1125,11 @@ async def test_create_instance_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.create_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.CreateInstanceRequest() # Establish that the response is the type that we expect. @@ -1146,17 +1142,17 @@ async def test_create_instance_async_from_dict(): def test_create_instance_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.CreateInstanceRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.create_instance(request) # Establish that the underlying gRPC stub method was called. 
@@ -1171,11 +1167,14 @@ def test_create_instance_field_headers(): @pytest.mark.asyncio async def test_create_instance_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.CreateInstanceRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1183,7 +1182,6 @@ async def test_create_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.create_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1197,13 +1195,12 @@ async def test_create_instance_field_headers_async(): def test_create_instance_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_instance( @@ -1216,16 +1213,13 @@ def test_create_instance_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].instance == cloud_memcache.Instance(name="name_value") - assert args[0].instance_id == "instance_id_value" def test_create_instance_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1240,7 +1234,9 @@ def test_create_instance_flattened_error(): @pytest.mark.asyncio async def test_create_instance_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_instance), "__call__") as call: @@ -1262,17 +1258,16 @@ async def test_create_instance_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].instance == cloud_memcache.Instance(name="name_value") - assert args[0].instance_id == "instance_id_value" @pytest.mark.asyncio async def test_create_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1289,7 +1284,7 @@ def test_update_instance( transport: str = "grpc", request_type=cloud_memcache.UpdateInstanceRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1300,13 +1295,11 @@ def test_update_instance( with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateInstanceRequest() # Establish that the response is the type that we expect. @@ -1321,7 +1314,7 @@ def test_update_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1329,7 +1322,6 @@ def test_update_instance_empty_call(): client.update_instance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateInstanceRequest() @@ -1338,7 +1330,7 @@ async def test_update_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1351,13 +1343,11 @@ async def test_update_instance_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.update_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateInstanceRequest() # Establish that the response is the type that we expect. @@ -1370,17 +1360,17 @@ async def test_update_instance_async_from_dict(): def test_update_instance_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateInstanceRequest() + request.instance.name = "instance.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.update_instance(request) # Establish that the underlying gRPC stub method was called. 
@@ -1397,11 +1387,14 @@ def test_update_instance_field_headers(): @pytest.mark.asyncio async def test_update_instance_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateInstanceRequest() + request.instance.name = "instance.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1409,7 +1402,6 @@ async def test_update_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.update_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1425,32 +1417,29 @@ async def test_update_instance_field_headers_async(): def test_update_instance_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_instance( instance=cloud_memcache.Instance(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].instance == cloud_memcache.Instance(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_instance_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1458,13 +1447,15 @@ def test_update_instance_flattened_error(): client.update_instance( cloud_memcache.UpdateInstanceRequest(), instance=cloud_memcache.Instance(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_instance_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_instance), "__call__") as call: @@ -1478,22 +1469,22 @@ async def test_update_instance_flattened_async(): # using the keyword arguments to the method. response = await client.update_instance( instance=cloud_memcache.Instance(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].instance == cloud_memcache.Instance(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1501,7 +1492,7 @@ async def test_update_instance_flattened_error_async(): await client.update_instance( cloud_memcache.UpdateInstanceRequest(), instance=cloud_memcache.Instance(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -1509,7 +1500,7 @@ def test_update_parameters( transport: str = "grpc", request_type=cloud_memcache.UpdateParametersRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1522,13 +1513,11 @@ def test_update_parameters( ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_parameters(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateParametersRequest() # Establish that the response is the type that we expect. @@ -1543,7 +1532,7 @@ def test_update_parameters_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1553,7 +1542,6 @@ def test_update_parameters_empty_call(): client.update_parameters() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateParametersRequest() @@ -1562,7 +1550,7 @@ async def test_update_parameters_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateParametersRequest ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1577,13 +1565,11 @@ async def test_update_parameters_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.update_parameters(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateParametersRequest() # Establish that the response is the type that we expect. @@ -1596,11 +1582,12 @@ async def test_update_parameters_async_from_dict(): def test_update_parameters_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateParametersRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1608,7 +1595,6 @@ def test_update_parameters_field_headers(): type(client.transport.update_parameters), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.update_parameters(request) # Establish that the underlying gRPC stub method was called. @@ -1623,11 +1609,14 @@ def test_update_parameters_field_headers(): @pytest.mark.asyncio async def test_update_parameters_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateParametersRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1637,7 +1626,6 @@ async def test_update_parameters_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.update_parameters(request) # Establish that the underlying gRPC stub method was called. @@ -1651,7 +1639,7 @@ async def test_update_parameters_field_headers_async(): def test_update_parameters_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1659,12 +1647,11 @@ def test_update_parameters_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.update_parameters( name="name_value", - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), parameters=cloud_memcache.MemcacheParameters(id="id_value"), ) @@ -1672,16 +1659,13 @@ def test_update_parameters_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) - + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) assert args[0].parameters == cloud_memcache.MemcacheParameters(id="id_value") def test_update_parameters_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1689,14 +1673,16 @@ def test_update_parameters_flattened_error(): client.update_parameters( cloud_memcache.UpdateParametersRequest(), name="name_value", - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), parameters=cloud_memcache.MemcacheParameters(id="id_value"), ) @pytest.mark.asyncio async def test_update_parameters_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1712,7 +1698,7 @@ async def test_update_parameters_flattened_async(): # using the keyword arguments to the method. 
response = await client.update_parameters( name="name_value", - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), parameters=cloud_memcache.MemcacheParameters(id="id_value"), ) @@ -1720,17 +1706,16 @@ async def test_update_parameters_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) - + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) assert args[0].parameters == cloud_memcache.MemcacheParameters(id="id_value") @pytest.mark.asyncio async def test_update_parameters_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1738,7 +1723,7 @@ async def test_update_parameters_flattened_error_async(): await client.update_parameters( cloud_memcache.UpdateParametersRequest(), name="name_value", - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), parameters=cloud_memcache.MemcacheParameters(id="id_value"), ) @@ -1747,7 +1732,7 @@ def test_delete_instance( transport: str = "grpc", request_type=cloud_memcache.DeleteInstanceRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1758,13 +1743,11 @@ def test_delete_instance( with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.DeleteInstanceRequest() # Establish that the response is the type that we expect. @@ -1779,7 +1762,7 @@ def test_delete_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1787,7 +1770,6 @@ def test_delete_instance_empty_call(): client.delete_instance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.DeleteInstanceRequest() @@ -1796,7 +1778,7 @@ async def test_delete_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.DeleteInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1809,13 +1791,11 @@ async def test_delete_instance_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.DeleteInstanceRequest() # Establish that the response is the type that we expect. 
@@ -1828,17 +1808,17 @@ async def test_delete_instance_async_from_dict(): def test_delete_instance_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.DeleteInstanceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1853,11 +1833,14 @@ def test_delete_instance_field_headers(): @pytest.mark.asyncio async def test_delete_instance_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.DeleteInstanceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1865,7 +1848,6 @@ async def test_delete_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1879,13 +1861,12 @@ async def test_delete_instance_field_headers_async(): def test_delete_instance_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_instance(name="name_value",) @@ -1894,12 +1875,11 @@ def test_delete_instance_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_instance_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1911,7 +1891,9 @@ def test_delete_instance_flattened_error(): @pytest.mark.asyncio async def test_delete_instance_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: @@ -1929,13 +1911,14 @@ async def test_delete_instance_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1949,7 +1932,7 @@ def test_apply_parameters( transport: str = "grpc", request_type=cloud_memcache.ApplyParametersRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1960,13 +1943,11 @@ def test_apply_parameters( with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.apply_parameters(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ApplyParametersRequest() # Establish that the response is the type that we expect. @@ -1981,7 +1962,7 @@ def test_apply_parameters_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1989,7 +1970,6 @@ def test_apply_parameters_empty_call(): client.apply_parameters() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ApplyParametersRequest() @@ -1998,7 +1978,7 @@ async def test_apply_parameters_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.ApplyParametersRequest ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2011,13 +1991,11 @@ async def test_apply_parameters_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.apply_parameters(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ApplyParametersRequest() # Establish that the response is the type that we expect. @@ -2030,17 +2008,17 @@ async def test_apply_parameters_async_from_dict(): def test_apply_parameters_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ApplyParametersRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.apply_parameters(request) # Establish that the underlying gRPC stub method was called. 
@@ -2055,11 +2033,14 @@ def test_apply_parameters_field_headers(): @pytest.mark.asyncio async def test_apply_parameters_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ApplyParametersRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2067,7 +2048,6 @@ async def test_apply_parameters_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.apply_parameters(request) # Establish that the underlying gRPC stub method was called. @@ -2081,13 +2061,12 @@ async def test_apply_parameters_field_headers_async(): def test_apply_parameters_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.apply_parameters( @@ -2098,16 +2077,13 @@ def test_apply_parameters_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].node_ids == ["node_ids_value"] - assert args[0].apply_all == True def test_apply_parameters_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2122,7 +2098,9 @@ def test_apply_parameters_flattened_error(): @pytest.mark.asyncio async def test_apply_parameters_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: @@ -2142,17 +2120,16 @@ async def test_apply_parameters_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].node_ids == ["node_ids_value"] - assert args[0].apply_all == True @pytest.mark.asyncio async def test_apply_parameters_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2168,16 +2145,16 @@ async def test_apply_parameters_flattened_error_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.CloudMemcacheGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.CloudMemcacheGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = CloudMemcacheClient( @@ -2187,7 +2164,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.CloudMemcacheGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = CloudMemcacheClient( @@ -2198,7 +2175,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.CloudMemcacheGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = CloudMemcacheClient(transport=transport) assert client.transport is transport @@ -2207,13 +2184,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.CloudMemcacheGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.CloudMemcacheGrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -2228,23 +2205,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.CloudMemcacheGrpcTransport,) def test_cloud_memcache_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.CloudMemcacheTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -2256,7 +2233,7 @@ def test_cloud_memcache_base_transport(): ) as Transport: Transport.return_value = None transport = transports.CloudMemcacheTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -2280,15 +2257,37 @@ def test_cloud_memcache_base_transport(): 
transport.operations_client +@requires_google_auth_gte_1_25_0 def test_cloud_memcache_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.memcache_v1.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudMemcacheTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_cloud_memcache_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.memcache_v1.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.CloudMemcacheTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -2301,19 +2300,33 @@ def test_cloud_memcache_base_transport_with_credentials_file(): def test_cloud_memcache_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.memcache_v1.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.CloudMemcacheTransport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_cloud_memcache_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CloudMemcacheClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_cloud_memcache_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) CloudMemcacheClient() adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), @@ -2321,20 +2334,156 @@ def test_cloud_memcache_auth_adc(): ) -def test_cloud_memcache_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudMemcacheGrpcTransport, + transports.CloudMemcacheGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_cloud_memcache_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.CloudMemcacheGrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudMemcacheGrpcTransport, + transports.CloudMemcacheGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_cloud_memcache_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CloudMemcacheGrpcTransport, grpc_helpers), + (transports.CloudMemcacheGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_cloud_memcache_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "memcache.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="memcache.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CloudMemcacheGrpcTransport, grpc_helpers), + (transports.CloudMemcacheGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_cloud_memcache_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "memcache.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CloudMemcacheGrpcTransport, grpc_helpers), + (transports.CloudMemcacheGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_cloud_memcache_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "memcache.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "transport_class", [ @@ -2343,7 +2492,7 @@ def test_cloud_memcache_transport_auth_adc(): ], ) def test_cloud_memcache_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -2382,7 +2531,7 @@ def test_cloud_memcache_grpc_transport_client_cert_source_for_mtls(transport_cla def test_cloud_memcache_host_no_port(): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="memcache.googleapis.com" ), @@ -2392,7 +2541,7 @@ def test_cloud_memcache_host_no_port(): def test_cloud_memcache_host_with_port(): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="memcache.googleapis.com:8000" ), @@ -2446,9 +2595,9 @@ def test_cloud_memcache_transport_channel_mtls_with_client_cert_source(transport mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = 
ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -2524,7 +2673,7 @@ def test_cloud_memcache_transport_channel_mtls_with_adc(transport_class): def test_cloud_memcache_grpc_lro_client(): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) transport = client.transport @@ -2537,7 +2686,7 @@ def test_cloud_memcache_grpc_lro_client(): def test_cloud_memcache_grpc_lro_async_client(): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client.transport @@ -2552,7 +2701,6 @@ def test_instance_path(): project = "squid" location = "clam" instance = "whelk" - expected = "projects/{project}/locations/{location}/instances/{instance}".format( project=project, location=location, instance=instance, ) @@ -2575,7 +2723,6 @@ def test_parse_instance_path(): def test_common_billing_account_path(): billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -2596,7 +2743,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "winkle" - expected = "folders/{folder}".format(folder=folder,) actual = CloudMemcacheClient.common_folder_path(folder) assert expected == actual @@ -2615,7 +2761,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "scallop" - expected = "organizations/{organization}".format(organization=organization,) actual = CloudMemcacheClient.common_organization_path(organization) assert expected == actual @@ -2634,7 +2779,6 @@ def 
test_parse_common_organization_path(): def test_common_project_path(): project = "squid" - expected = "projects/{project}".format(project=project,) actual = CloudMemcacheClient.common_project_path(project) assert expected == actual @@ -2654,7 +2798,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "whelk" location = "octopus" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -2681,7 +2824,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.CloudMemcacheTransport, "_prep_wrapped_messages" ) as prep: client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2690,6 +2833,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = CloudMemcacheClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/memcache_v1beta2/__init__.py b/tests/unit/gapic/memcache_v1beta2/__init__.py index 42ffdf2..4de6597 100644 --- a/tests/unit/gapic/memcache_v1beta2/__init__.py +++ b/tests/unit/gapic/memcache_v1beta2/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index 1e242ab..b991eec 100644 --- a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ 
-14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,16 +23,16 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import future from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.memcache_v1beta2.services.cloud_memcache import ( CloudMemcacheAsyncClient, @@ -41,11 +40,41 @@ from google.cloud.memcache_v1beta2.services.cloud_memcache import CloudMemcacheClient from google.cloud.memcache_v1beta2.services.cloud_memcache import pagers from google.cloud.memcache_v1beta2.services.cloud_memcache import transports +from google.cloud.memcache_v1beta2.services.cloud_memcache.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.memcache_v1beta2.services.cloud_memcache.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.memcache_v1beta2.types import cloud_memcache from google.longrunning import operations_pb2 from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and 
auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -96,7 +125,7 @@ def test__get_default_mtls_endpoint(): "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] ) def test_cloud_memcache_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -113,7 +142,7 @@ def test_cloud_memcache_client_from_service_account_info(client_class): "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] ) def test_cloud_memcache_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -166,7 +195,7 @@ def test_cloud_memcache_client_client_options( ): # Check that if channel is provided we won't create a new one. 
with mock.patch.object(CloudMemcacheClient, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -454,7 +483,7 @@ def test_list_instances( transport: str = "grpc", request_type=cloud_memcache.ListInstancesRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -467,21 +496,16 @@ def test_list_instances( call.return_value = cloud_memcache.ListInstancesResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) - response = client.list_instances(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ListInstancesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @@ -493,7 +517,7 @@ def test_list_instances_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -501,7 +525,6 @@ def test_list_instances_empty_call(): client.list_instances() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ListInstancesRequest() @@ -510,7 +533,7 @@ async def test_list_instances_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.ListInstancesRequest ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -526,20 +549,16 @@ async def test_list_instances_async( unreachable=["unreachable_value"], ) ) - response = await client.list_instances(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ListInstancesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListInstancesAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @@ -549,17 +568,17 @@ async def test_list_instances_async_from_dict(): def test_list_instances_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ListInstancesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: call.return_value = cloud_memcache.ListInstancesResponse() - client.list_instances(request) # Establish that the underlying gRPC stub method was called. 
@@ -574,11 +593,14 @@ def test_list_instances_field_headers(): @pytest.mark.asyncio async def test_list_instances_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ListInstancesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -586,7 +608,6 @@ async def test_list_instances_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_memcache.ListInstancesResponse() ) - await client.list_instances(request) # Establish that the underlying gRPC stub method was called. @@ -600,13 +621,12 @@ async def test_list_instances_field_headers_async(): def test_list_instances_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_memcache.ListInstancesResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_instances(parent="parent_value",) @@ -615,12 +635,11 @@ def test_list_instances_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_instances_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -632,7 +651,9 @@ def test_list_instances_flattened_error(): @pytest.mark.asyncio async def test_list_instances_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -650,13 +671,14 @@ async def test_list_instances_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_instances_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -667,7 +689,7 @@ async def test_list_instances_flattened_error_async(): def test_list_instances_pager(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials,) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -705,7 +727,7 @@ def test_list_instances_pager(): def test_list_instances_pages(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials,) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -735,7 +757,7 @@ def test_list_instances_pages(): @pytest.mark.asyncio async def test_list_instances_async_pager(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials,) + client = CloudMemcacheAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -772,7 +794,7 @@ async def test_list_instances_async_pager(): @pytest.mark.asyncio async def test_list_instances_async_pages(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials,) + client = CloudMemcacheAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -808,7 +830,7 @@ def test_get_instance( transport: str = "grpc", request_type=cloud_memcache.GetInstanceRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -830,37 +852,24 @@ def test_get_instance( discovery_endpoint="discovery_endpoint_value", update_available=True, ) - response = client.get_instance(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.GetInstanceRequest() # Establish that the response is the type that we expect. - assert isinstance(response, cloud_memcache.Instance) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.authorized_network == "authorized_network_value" - assert response.zones == ["zones_value"] - assert response.node_count == 1070 - assert response.memcache_version == cloud_memcache.MemcacheVersion.MEMCACHE_1_5 - assert response.state == cloud_memcache.Instance.State.CREATING - assert response.memcache_full_version == "memcache_full_version_value" - assert response.discovery_endpoint == "discovery_endpoint_value" - assert response.update_available is True @@ -872,7 +881,7 @@ def test_get_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -880,7 +889,6 @@ def test_get_instance_empty_call(): client.get_instance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.GetInstanceRequest() @@ -889,7 +897,7 @@ async def test_get_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.GetInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -913,36 +921,24 @@ async def test_get_instance_async( update_available=True, ) ) - response = await client.get_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.GetInstanceRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloud_memcache.Instance) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.authorized_network == "authorized_network_value" - assert response.zones == ["zones_value"] - assert response.node_count == 1070 - assert response.memcache_version == cloud_memcache.MemcacheVersion.MEMCACHE_1_5 - assert response.state == cloud_memcache.Instance.State.CREATING - assert response.memcache_full_version == "memcache_full_version_value" - assert response.discovery_endpoint == "discovery_endpoint_value" - assert response.update_available is True @@ -952,17 +948,17 @@ async def test_get_instance_async_from_dict(): def test_get_instance_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = cloud_memcache.GetInstanceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_instance), "__call__") as call: call.return_value = cloud_memcache.Instance() - client.get_instance(request) # Establish that the underlying gRPC stub method was called. @@ -977,11 +973,14 @@ def test_get_instance_field_headers(): @pytest.mark.asyncio async def test_get_instance_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.GetInstanceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -989,7 +988,6 @@ async def test_get_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_memcache.Instance() ) - await client.get_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1003,13 +1001,12 @@ async def test_get_instance_field_headers_async(): def test_get_instance_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_memcache.Instance() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_instance(name="name_value",) @@ -1018,12 +1015,11 @@ def test_get_instance_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_instance_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1035,7 +1031,9 @@ def test_get_instance_flattened_error(): @pytest.mark.asyncio async def test_get_instance_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_instance), "__call__") as call: @@ -1053,13 +1051,14 @@ async def test_get_instance_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1073,7 +1072,7 @@ def test_create_instance( transport: str = "grpc", request_type=cloud_memcache.CreateInstanceRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1084,13 +1083,11 @@ def test_create_instance( with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.CreateInstanceRequest() # Establish that the response is the type that we expect. @@ -1105,7 +1102,7 @@ def test_create_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1113,7 +1110,6 @@ def test_create_instance_empty_call(): client.create_instance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.CreateInstanceRequest() @@ -1122,7 +1118,7 @@ async def test_create_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.CreateInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1135,13 +1131,11 @@ async def test_create_instance_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.create_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.CreateInstanceRequest() # Establish that the response is the type that we expect. 
@@ -1154,17 +1148,17 @@ async def test_create_instance_async_from_dict(): def test_create_instance_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.CreateInstanceRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.create_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1179,11 +1173,14 @@ def test_create_instance_field_headers(): @pytest.mark.asyncio async def test_create_instance_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.CreateInstanceRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1191,7 +1188,6 @@ async def test_create_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.create_instance(request) # Establish that the underlying gRPC stub method was called. 
@@ -1205,13 +1201,12 @@ async def test_create_instance_field_headers_async(): def test_create_instance_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_instance( @@ -1224,16 +1219,13 @@ def test_create_instance_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].instance_id == "instance_id_value" - assert args[0].resource == cloud_memcache.Instance(name="name_value") def test_create_instance_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1248,7 +1240,9 @@ def test_create_instance_flattened_error(): @pytest.mark.asyncio async def test_create_instance_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_instance), "__call__") as call: @@ -1270,17 +1264,16 @@ async def test_create_instance_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].instance_id == "instance_id_value" - assert args[0].resource == cloud_memcache.Instance(name="name_value") @pytest.mark.asyncio async def test_create_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1297,7 +1290,7 @@ def test_update_instance( transport: str = "grpc", request_type=cloud_memcache.UpdateInstanceRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1308,13 +1301,11 @@ def test_update_instance( with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateInstanceRequest() # Establish that the response is the type that we expect. @@ -1329,7 +1320,7 @@ def test_update_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1337,7 +1328,6 @@ def test_update_instance_empty_call(): client.update_instance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateInstanceRequest() @@ -1346,7 +1336,7 @@ async def test_update_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1359,13 +1349,11 @@ async def test_update_instance_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.update_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateInstanceRequest() # Establish that the response is the type that we expect. @@ -1378,17 +1366,17 @@ async def test_update_instance_async_from_dict(): def test_update_instance_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateInstanceRequest() + request.resource.name = "resource.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.update_instance(request) # Establish that the underlying gRPC stub method was called. 
@@ -1405,11 +1393,14 @@ def test_update_instance_field_headers(): @pytest.mark.asyncio async def test_update_instance_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateInstanceRequest() + request.resource.name = "resource.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1417,7 +1408,6 @@ async def test_update_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.update_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1433,17 +1423,16 @@ async def test_update_instance_field_headers_async(): def test_update_instance_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_instance( - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), resource=cloud_memcache.Instance(name="name_value"), ) @@ -1451,28 +1440,28 @@ def test_update_instance_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) - + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) assert args[0].resource == cloud_memcache.Instance(name="name_value") def test_update_instance_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.update_instance( cloud_memcache.UpdateInstanceRequest(), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), resource=cloud_memcache.Instance(name="name_value"), ) @pytest.mark.asyncio async def test_update_instance_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_instance), "__call__") as call: @@ -1485,7 +1474,7 @@ async def test_update_instance_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_instance( - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), resource=cloud_memcache.Instance(name="name_value"), ) @@ -1493,22 +1482,22 @@ async def test_update_instance_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) - + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) assert args[0].resource == cloud_memcache.Instance(name="name_value") @pytest.mark.asyncio async def test_update_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.update_instance( cloud_memcache.UpdateInstanceRequest(), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), resource=cloud_memcache.Instance(name="name_value"), ) @@ -1517,7 +1506,7 @@ def test_update_parameters( transport: str = "grpc", request_type=cloud_memcache.UpdateParametersRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1530,13 +1519,11 @@ def test_update_parameters( ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_parameters(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateParametersRequest() # Establish that the response is the type that we expect. @@ -1551,7 +1538,7 @@ def test_update_parameters_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1561,7 +1548,6 @@ def test_update_parameters_empty_call(): client.update_parameters() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateParametersRequest() @@ -1570,7 +1556,7 @@ async def test_update_parameters_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateParametersRequest ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1585,13 +1571,11 @@ async def test_update_parameters_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.update_parameters(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateParametersRequest() # Establish that the response is the type that we expect. @@ -1604,11 +1588,12 @@ async def test_update_parameters_async_from_dict(): def test_update_parameters_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateParametersRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1616,7 +1601,6 @@ def test_update_parameters_field_headers(): type(client.transport.update_parameters), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.update_parameters(request) # Establish that the underlying gRPC stub method was called. @@ -1631,11 +1615,14 @@ def test_update_parameters_field_headers(): @pytest.mark.asyncio async def test_update_parameters_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateParametersRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1645,7 +1632,6 @@ async def test_update_parameters_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.update_parameters(request) # Establish that the underlying gRPC stub method was called. @@ -1659,7 +1645,7 @@ async def test_update_parameters_field_headers_async(): def test_update_parameters_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1667,12 +1653,11 @@ def test_update_parameters_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.update_parameters( name="name_value", - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), parameters=cloud_memcache.MemcacheParameters(id="id_value"), ) @@ -1680,16 +1665,13 @@ def test_update_parameters_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) - + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) assert args[0].parameters == cloud_memcache.MemcacheParameters(id="id_value") def test_update_parameters_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1697,14 +1679,16 @@ def test_update_parameters_flattened_error(): client.update_parameters( cloud_memcache.UpdateParametersRequest(), name="name_value", - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), parameters=cloud_memcache.MemcacheParameters(id="id_value"), ) @pytest.mark.asyncio async def test_update_parameters_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1720,7 +1704,7 @@ async def test_update_parameters_flattened_async(): # using the keyword arguments to the method. 
response = await client.update_parameters( name="name_value", - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), parameters=cloud_memcache.MemcacheParameters(id="id_value"), ) @@ -1728,17 +1712,16 @@ async def test_update_parameters_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) - + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) assert args[0].parameters == cloud_memcache.MemcacheParameters(id="id_value") @pytest.mark.asyncio async def test_update_parameters_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1746,7 +1729,7 @@ async def test_update_parameters_flattened_error_async(): await client.update_parameters( cloud_memcache.UpdateParametersRequest(), name="name_value", - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), parameters=cloud_memcache.MemcacheParameters(id="id_value"), ) @@ -1755,7 +1738,7 @@ def test_delete_instance( transport: str = "grpc", request_type=cloud_memcache.DeleteInstanceRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1766,13 +1749,11 @@ def test_delete_instance( with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.DeleteInstanceRequest() # Establish that the response is the type that we expect. @@ -1787,7 +1768,7 @@ def test_delete_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1795,7 +1776,6 @@ def test_delete_instance_empty_call(): client.delete_instance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.DeleteInstanceRequest() @@ -1804,7 +1784,7 @@ async def test_delete_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.DeleteInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1817,13 +1797,11 @@ async def test_delete_instance_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.DeleteInstanceRequest() # Establish that the response is the type that we expect. 
@@ -1836,17 +1814,17 @@ async def test_delete_instance_async_from_dict(): def test_delete_instance_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.DeleteInstanceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1861,11 +1839,14 @@ def test_delete_instance_field_headers(): @pytest.mark.asyncio async def test_delete_instance_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.DeleteInstanceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1873,7 +1854,6 @@ async def test_delete_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1887,13 +1867,12 @@ async def test_delete_instance_field_headers_async(): def test_delete_instance_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_instance(name="name_value",) @@ -1902,12 +1881,11 @@ def test_delete_instance_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_instance_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1919,7 +1897,9 @@ def test_delete_instance_flattened_error(): @pytest.mark.asyncio async def test_delete_instance_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: @@ -1937,13 +1917,14 @@ async def test_delete_instance_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1957,7 +1938,7 @@ def test_apply_parameters( transport: str = "grpc", request_type=cloud_memcache.ApplyParametersRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1968,13 +1949,11 @@ def test_apply_parameters( with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.apply_parameters(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ApplyParametersRequest() # Establish that the response is the type that we expect. @@ -1989,7 +1968,7 @@ def test_apply_parameters_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1997,7 +1976,6 @@ def test_apply_parameters_empty_call(): client.apply_parameters() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ApplyParametersRequest() @@ -2006,7 +1984,7 @@ async def test_apply_parameters_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.ApplyParametersRequest ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2019,13 +1997,11 @@ async def test_apply_parameters_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.apply_parameters(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ApplyParametersRequest() # Establish that the response is the type that we expect. @@ -2038,17 +2014,17 @@ async def test_apply_parameters_async_from_dict(): def test_apply_parameters_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ApplyParametersRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.apply_parameters(request) # Establish that the underlying gRPC stub method was called. 
@@ -2063,11 +2039,14 @@ def test_apply_parameters_field_headers(): @pytest.mark.asyncio async def test_apply_parameters_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ApplyParametersRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2075,7 +2054,6 @@ async def test_apply_parameters_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.apply_parameters(request) # Establish that the underlying gRPC stub method was called. @@ -2089,13 +2067,12 @@ async def test_apply_parameters_field_headers_async(): def test_apply_parameters_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.apply_parameters( @@ -2106,16 +2083,13 @@ def test_apply_parameters_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].node_ids == ["node_ids_value"] - assert args[0].apply_all == True def test_apply_parameters_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2130,7 +2104,9 @@ def test_apply_parameters_flattened_error(): @pytest.mark.asyncio async def test_apply_parameters_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: @@ -2150,17 +2126,16 @@ async def test_apply_parameters_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].node_ids == ["node_ids_value"] - assert args[0].apply_all == True @pytest.mark.asyncio async def test_apply_parameters_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -2177,7 +2152,7 @@ def test_apply_software_update( transport: str = "grpc", request_type=cloud_memcache.ApplySoftwareUpdateRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2190,13 +2165,11 @@ def test_apply_software_update( ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.apply_software_update(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ApplySoftwareUpdateRequest() # Establish that the response is the type that we expect. @@ -2211,7 +2184,7 @@ def test_apply_software_update_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2221,7 +2194,6 @@ def test_apply_software_update_empty_call(): client.apply_software_update() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ApplySoftwareUpdateRequest() @@ -2231,7 +2203,7 @@ async def test_apply_software_update_async( request_type=cloud_memcache.ApplySoftwareUpdateRequest, ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2246,13 +2218,11 @@ async def test_apply_software_update_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.apply_software_update(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ApplySoftwareUpdateRequest() # Establish that the response is the type that we expect. @@ -2265,11 +2235,12 @@ async def test_apply_software_update_async_from_dict(): def test_apply_software_update_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ApplySoftwareUpdateRequest() + request.instance = "instance/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2277,7 +2248,6 @@ def test_apply_software_update_field_headers(): type(client.transport.apply_software_update), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.apply_software_update(request) # Establish that the underlying gRPC stub method was called. 
@@ -2292,11 +2262,14 @@ def test_apply_software_update_field_headers(): @pytest.mark.asyncio async def test_apply_software_update_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ApplySoftwareUpdateRequest() + request.instance = "instance/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2306,7 +2279,6 @@ async def test_apply_software_update_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.apply_software_update(request) # Establish that the underlying gRPC stub method was called. @@ -2320,7 +2292,7 @@ async def test_apply_software_update_field_headers_async(): def test_apply_software_update_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2328,7 +2300,6 @@ def test_apply_software_update_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.apply_software_update( @@ -2339,16 +2310,13 @@ def test_apply_software_update_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].instance == "instance_value" - assert args[0].node_ids == ["node_ids_value"] - assert args[0].apply_all == True def test_apply_software_update_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2363,7 +2331,9 @@ def test_apply_software_update_flattened_error(): @pytest.mark.asyncio async def test_apply_software_update_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2385,17 +2355,16 @@ async def test_apply_software_update_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].instance == "instance_value" - assert args[0].node_ids == ["node_ids_value"] - assert args[0].apply_all == True @pytest.mark.asyncio async def test_apply_software_update_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2411,16 +2380,16 @@ async def test_apply_software_update_flattened_error_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.CloudMemcacheGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.CloudMemcacheGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = CloudMemcacheClient( @@ -2430,7 +2399,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.CloudMemcacheGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = CloudMemcacheClient( @@ -2441,7 +2410,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.CloudMemcacheGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = CloudMemcacheClient(transport=transport) assert client.transport is transport @@ -2450,13 +2419,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.CloudMemcacheGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.CloudMemcacheGrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -2471,23 +2440,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.CloudMemcacheGrpcTransport,) def test_cloud_memcache_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.CloudMemcacheTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -2499,7 +2468,7 @@ def test_cloud_memcache_base_transport(): ) as Transport: Transport.return_value = None transport = transports.CloudMemcacheTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -2524,15 +2493,37 @@ def test_cloud_memcache_base_transport(): 
transport.operations_client +@requires_google_auth_gte_1_25_0 def test_cloud_memcache_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudMemcacheTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_cloud_memcache_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.CloudMemcacheTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -2545,19 +2536,33 @@ def test_cloud_memcache_base_transport_with_credentials_file(): def test_cloud_memcache_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.CloudMemcacheTransport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_cloud_memcache_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CloudMemcacheClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_cloud_memcache_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) CloudMemcacheClient() adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), @@ -2565,20 +2570,156 @@ def test_cloud_memcache_auth_adc(): ) -def test_cloud_memcache_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudMemcacheGrpcTransport, + transports.CloudMemcacheGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_cloud_memcache_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.CloudMemcacheGrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudMemcacheGrpcTransport, + transports.CloudMemcacheGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_cloud_memcache_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CloudMemcacheGrpcTransport, grpc_helpers), + (transports.CloudMemcacheGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_cloud_memcache_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "memcache.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="memcache.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CloudMemcacheGrpcTransport, grpc_helpers), + (transports.CloudMemcacheGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_cloud_memcache_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "memcache.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CloudMemcacheGrpcTransport, grpc_helpers), + (transports.CloudMemcacheGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_cloud_memcache_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "memcache.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "transport_class", [ @@ -2587,7 +2728,7 @@ def test_cloud_memcache_transport_auth_adc(): ], ) def test_cloud_memcache_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -2626,7 +2767,7 @@ def test_cloud_memcache_grpc_transport_client_cert_source_for_mtls(transport_cla def test_cloud_memcache_host_no_port(): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="memcache.googleapis.com" ), @@ -2636,7 +2777,7 @@ def test_cloud_memcache_host_no_port(): def test_cloud_memcache_host_with_port(): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="memcache.googleapis.com:8000" ), @@ -2690,9 +2831,9 @@ def test_cloud_memcache_transport_channel_mtls_with_client_cert_source(transport mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = 
ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -2768,7 +2909,7 @@ def test_cloud_memcache_transport_channel_mtls_with_adc(transport_class): def test_cloud_memcache_grpc_lro_client(): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) transport = client.transport @@ -2781,7 +2922,7 @@ def test_cloud_memcache_grpc_lro_client(): def test_cloud_memcache_grpc_lro_async_client(): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client.transport @@ -2796,7 +2937,6 @@ def test_instance_path(): project = "squid" location = "clam" instance = "whelk" - expected = "projects/{project}/locations/{location}/instances/{instance}".format( project=project, location=location, instance=instance, ) @@ -2819,7 +2959,6 @@ def test_parse_instance_path(): def test_common_billing_account_path(): billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -2840,7 +2979,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "winkle" - expected = "folders/{folder}".format(folder=folder,) actual = CloudMemcacheClient.common_folder_path(folder) assert expected == actual @@ -2859,7 +2997,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "scallop" - expected = "organizations/{organization}".format(organization=organization,) actual = CloudMemcacheClient.common_organization_path(organization) assert expected == actual @@ -2878,7 +3015,6 @@ def 
test_parse_common_organization_path(): def test_common_project_path(): project = "squid" - expected = "projects/{project}".format(project=project,) actual = CloudMemcacheClient.common_project_path(project) assert expected == actual @@ -2898,7 +3034,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "whelk" location = "octopus" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -2925,7 +3060,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.CloudMemcacheTransport, "_prep_wrapped_messages" ) as prep: client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2934,6 +3069,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = CloudMemcacheClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) From 44989ebc889a2f35dac8f1935e3a0e2d5087af3d Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 20 May 2021 14:08:40 -0600 Subject: [PATCH 022/159] Revert "chore: upgrade gapic-generator-python to 0.46.3 (#57)" (#61) This reverts commit 2ad1bfbee1f847c1b150b0e1595faba63f42d768. 
--- docs/memcache_v1/cloud_memcache.rst | 1 + docs/memcache_v1beta2/cloud_memcache.rst | 1 + google/cloud/memcache/__init__.py | 12 +- google/cloud/memcache_v1/__init__.py | 9 +- google/cloud/memcache_v1/gapic_metadata.json | 93 --- google/cloud/memcache_v1/services/__init__.py | 1 + .../services/cloud_memcache/__init__.py | 2 + .../services/cloud_memcache/async_client.py | 51 +- .../services/cloud_memcache/client.py | 94 ++- .../services/cloud_memcache/pagers.py | 4 +- .../cloud_memcache/transports/__init__.py | 2 + .../cloud_memcache/transports/base.py | 132 +--- .../cloud_memcache/transports/grpc.py | 44 +- .../cloud_memcache/transports/grpc_asyncio.py | 45 +- google/cloud/memcache_v1/types/__init__.py | 2 + .../cloud/memcache_v1/types/cloud_memcache.py | 151 ++-- google/cloud/memcache_v1beta2/__init__.py | 9 +- .../memcache_v1beta2/gapic_metadata.json | 103 --- .../memcache_v1beta2/services/__init__.py | 1 + .../services/cloud_memcache/__init__.py | 2 + .../services/cloud_memcache/async_client.py | 54 +- .../services/cloud_memcache/client.py | 97 ++- .../services/cloud_memcache/pagers.py | 4 +- .../cloud_memcache/transports/__init__.py | 2 + .../cloud_memcache/transports/base.py | 136 ++-- .../cloud_memcache/transports/grpc.py | 50 +- .../cloud_memcache/transports/grpc_asyncio.py | 49 +- .../cloud/memcache_v1beta2/types/__init__.py | 2 + .../memcache_v1beta2/types/cloud_memcache.py | 166 +++-- scripts/fixup_memcache_v1_keywords.py | 19 +- scripts/fixup_memcache_v1beta2_keywords.py | 21 +- setup.py | 1 - testing/constraints-3.6.txt | 2 - tests/__init__.py | 15 - tests/unit/__init__.py | 15 - tests/unit/gapic/__init__.py | 15 - tests/unit/gapic/memcache_v1/__init__.py | 1 + .../gapic/memcache_v1/test_cloud_memcache.py | 615 +++++++--------- tests/unit/gapic/memcache_v1beta2/__init__.py | 1 + .../memcache_v1beta2/test_cloud_memcache.py | 657 +++++++----------- 40 files changed, 1123 insertions(+), 1558 deletions(-) delete mode 100644 
google/cloud/memcache_v1/gapic_metadata.json delete mode 100644 google/cloud/memcache_v1beta2/gapic_metadata.json delete mode 100644 tests/__init__.py delete mode 100644 tests/unit/__init__.py delete mode 100644 tests/unit/gapic/__init__.py diff --git a/docs/memcache_v1/cloud_memcache.rst b/docs/memcache_v1/cloud_memcache.rst index 35de375..0c21866 100644 --- a/docs/memcache_v1/cloud_memcache.rst +++ b/docs/memcache_v1/cloud_memcache.rst @@ -5,6 +5,7 @@ CloudMemcache :members: :inherited-members: + .. automodule:: google.cloud.memcache_v1.services.cloud_memcache.pagers :members: :inherited-members: diff --git a/docs/memcache_v1beta2/cloud_memcache.rst b/docs/memcache_v1beta2/cloud_memcache.rst index 7dc7a82..b20fc3a 100644 --- a/docs/memcache_v1beta2/cloud_memcache.rst +++ b/docs/memcache_v1beta2/cloud_memcache.rst @@ -5,6 +5,7 @@ CloudMemcache :members: :inherited-members: + .. automodule:: google.cloud.memcache_v1beta2.services.cloud_memcache.pagers :members: :inherited-members: diff --git a/google/cloud/memcache/__init__.py b/google/cloud/memcache/__init__.py index f2b7c14..4075bad 100644 --- a/google/cloud/memcache/__init__.py +++ b/google/cloud/memcache/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,11 +15,10 @@ # limitations under the License. 
# -from google.cloud.memcache_v1.services.cloud_memcache.client import CloudMemcacheClient from google.cloud.memcache_v1.services.cloud_memcache.async_client import ( CloudMemcacheAsyncClient, ) - +from google.cloud.memcache_v1.services.cloud_memcache.client import CloudMemcacheClient from google.cloud.memcache_v1.types.cloud_memcache import ApplyParametersRequest from google.cloud.memcache_v1.types.cloud_memcache import CreateInstanceRequest from google.cloud.memcache_v1.types.cloud_memcache import DeleteInstanceRequest @@ -27,15 +27,15 @@ from google.cloud.memcache_v1.types.cloud_memcache import ListInstancesRequest from google.cloud.memcache_v1.types.cloud_memcache import ListInstancesResponse from google.cloud.memcache_v1.types.cloud_memcache import MemcacheParameters +from google.cloud.memcache_v1.types.cloud_memcache import MemcacheVersion from google.cloud.memcache_v1.types.cloud_memcache import OperationMetadata from google.cloud.memcache_v1.types.cloud_memcache import UpdateInstanceRequest from google.cloud.memcache_v1.types.cloud_memcache import UpdateParametersRequest -from google.cloud.memcache_v1.types.cloud_memcache import MemcacheVersion __all__ = ( - "CloudMemcacheClient", - "CloudMemcacheAsyncClient", "ApplyParametersRequest", + "CloudMemcacheAsyncClient", + "CloudMemcacheClient", "CreateInstanceRequest", "DeleteInstanceRequest", "GetInstanceRequest", @@ -43,8 +43,8 @@ "ListInstancesRequest", "ListInstancesResponse", "MemcacheParameters", + "MemcacheVersion", "OperationMetadata", "UpdateInstanceRequest", "UpdateParametersRequest", - "MemcacheVersion", ) diff --git a/google/cloud/memcache_v1/__init__.py b/google/cloud/memcache_v1/__init__.py index 7d3016d..4d28d1b 100644 --- a/google/cloud/memcache_v1/__init__.py +++ b/google/cloud/memcache_v1/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,8 +16,6 @@ # from .services.cloud_memcache import 
CloudMemcacheClient -from .services.cloud_memcache import CloudMemcacheAsyncClient - from .types.cloud_memcache import ApplyParametersRequest from .types.cloud_memcache import CreateInstanceRequest from .types.cloud_memcache import DeleteInstanceRequest @@ -25,15 +24,14 @@ from .types.cloud_memcache import ListInstancesRequest from .types.cloud_memcache import ListInstancesResponse from .types.cloud_memcache import MemcacheParameters +from .types.cloud_memcache import MemcacheVersion from .types.cloud_memcache import OperationMetadata from .types.cloud_memcache import UpdateInstanceRequest from .types.cloud_memcache import UpdateParametersRequest -from .types.cloud_memcache import MemcacheVersion + __all__ = ( - "CloudMemcacheAsyncClient", "ApplyParametersRequest", - "CloudMemcacheClient", "CreateInstanceRequest", "DeleteInstanceRequest", "GetInstanceRequest", @@ -45,4 +43,5 @@ "OperationMetadata", "UpdateInstanceRequest", "UpdateParametersRequest", + "CloudMemcacheClient", ) diff --git a/google/cloud/memcache_v1/gapic_metadata.json b/google/cloud/memcache_v1/gapic_metadata.json deleted file mode 100644 index 08d37fa..0000000 --- a/google/cloud/memcache_v1/gapic_metadata.json +++ /dev/null @@ -1,93 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.memcache_v1", - "protoPackage": "google.cloud.memcache.v1", - "schema": "1.0", - "services": { - "CloudMemcache": { - "clients": { - "grpc": { - "libraryClient": "CloudMemcacheClient", - "rpcs": { - "ApplyParameters": { - "methods": [ - "apply_parameters" - ] - }, - "CreateInstance": { - "methods": [ - "create_instance" - ] - }, - "DeleteInstance": { - "methods": [ - "delete_instance" - ] - }, - "GetInstance": { - "methods": [ - "get_instance" - ] - }, - "ListInstances": { - "methods": [ - "list_instances" - ] - }, - "UpdateInstance": { - "methods": [ - "update_instance" - ] - }, - "UpdateParameters": { - 
"methods": [ - "update_parameters" - ] - } - } - }, - "grpc-async": { - "libraryClient": "CloudMemcacheAsyncClient", - "rpcs": { - "ApplyParameters": { - "methods": [ - "apply_parameters" - ] - }, - "CreateInstance": { - "methods": [ - "create_instance" - ] - }, - "DeleteInstance": { - "methods": [ - "delete_instance" - ] - }, - "GetInstance": { - "methods": [ - "get_instance" - ] - }, - "ListInstances": { - "methods": [ - "list_instances" - ] - }, - "UpdateInstance": { - "methods": [ - "update_instance" - ] - }, - "UpdateParameters": { - "methods": [ - "update_parameters" - ] - } - } - } - } - } - } -} diff --git a/google/cloud/memcache_v1/services/__init__.py b/google/cloud/memcache_v1/services/__init__.py index 4de6597..42ffdf2 100644 --- a/google/cloud/memcache_v1/services/__init__.py +++ b/google/cloud/memcache_v1/services/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/google/cloud/memcache_v1/services/cloud_memcache/__init__.py b/google/cloud/memcache_v1/services/cloud_memcache/__init__.py index efb245e..8524cb4 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/__init__.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,6 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# + from .client import CloudMemcacheClient from .async_client import CloudMemcacheAsyncClient diff --git a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py index 738a2db..b09fddb 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,6 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # + from collections import OrderedDict import functools import re @@ -20,19 +22,20 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore +from google.auth import credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.memcache_v1.services.cloud_memcache import pagers from google.cloud.memcache_v1.types import cloud_memcache -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + from .transports.base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import 
CloudMemcacheGrpcAsyncIOTransport from .client import CloudMemcacheClient @@ -67,26 +70,31 @@ class CloudMemcacheAsyncClient: instance_path = staticmethod(CloudMemcacheClient.instance_path) parse_instance_path = staticmethod(CloudMemcacheClient.parse_instance_path) + common_billing_account_path = staticmethod( CloudMemcacheClient.common_billing_account_path ) parse_common_billing_account_path = staticmethod( CloudMemcacheClient.parse_common_billing_account_path ) + common_folder_path = staticmethod(CloudMemcacheClient.common_folder_path) parse_common_folder_path = staticmethod( CloudMemcacheClient.parse_common_folder_path ) + common_organization_path = staticmethod( CloudMemcacheClient.common_organization_path ) parse_common_organization_path = staticmethod( CloudMemcacheClient.parse_common_organization_path ) + common_project_path = staticmethod(CloudMemcacheClient.common_project_path) parse_common_project_path = staticmethod( CloudMemcacheClient.parse_common_project_path ) + common_location_path = staticmethod(CloudMemcacheClient.common_location_path) parse_common_location_path = staticmethod( CloudMemcacheClient.parse_common_location_path @@ -94,8 +102,7 @@ class CloudMemcacheAsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. + """Creates an instance of this client using the provided credentials info. Args: info (dict): The service account private key info. @@ -110,7 +117,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. 
Args: filename (str): The path to the service account private key json @@ -127,7 +134,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> CloudMemcacheTransport: - """Returns the transport used by the client instance. + """Return the transport used by the client instance. Returns: CloudMemcacheTransport: The transport used by the client instance. @@ -141,12 +148,12 @@ def transport(self) -> CloudMemcacheTransport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: credentials.Credentials = None, transport: Union[str, CloudMemcacheTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the cloud memcache client. + """Instantiate the cloud memcache client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -178,6 +185,7 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ + self._client = CloudMemcacheClient( credentials=credentials, transport=transport, @@ -209,6 +217,7 @@ async def list_instances( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -238,6 +247,7 @@ async def list_instances( # If we have keyword arguments corresponding to fields on the # request, apply these. + if parent is not None: request.parent = parent @@ -291,6 +301,7 @@ async def get_instance( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -315,6 +326,7 @@ async def get_instance( # If we have keyword arguments corresponding to fields on the # request, apply these. + if name is not None: request.name = name @@ -386,6 +398,7 @@ async def create_instance( This corresponds to the ``instance_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -414,6 +427,7 @@ async def create_instance( # If we have keyword arguments corresponding to fields on the # request, apply these. + if parent is not None: request.parent = parent if instance is not None: @@ -454,7 +468,7 @@ async def update_instance( request: cloud_memcache.UpdateInstanceRequest = None, *, instance: cloud_memcache.Instance = None, - update_mask: field_mask_pb2.FieldMask = None, + update_mask: field_mask.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -481,6 +495,7 @@ async def update_instance( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -509,6 +524,7 @@ async def update_instance( # If we have keyword arguments corresponding to fields on the # request, apply these. 
+ if instance is not None: request.instance = instance if update_mask is not None: @@ -549,7 +565,7 @@ async def update_parameters( request: cloud_memcache.UpdateParametersRequest = None, *, name: str = None, - update_mask: field_mask_pb2.FieldMask = None, + update_mask: field_mask.FieldMask = None, parameters: cloud_memcache.MemcacheParameters = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -584,6 +600,7 @@ async def update_parameters( This corresponds to the ``parameters`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -612,6 +629,7 @@ async def update_parameters( # If we have keyword arguments corresponding to fields on the # request, apply these. + if name is not None: request.name = name if update_mask is not None: @@ -671,6 +689,7 @@ async def delete_instance( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -710,6 +729,7 @@ async def delete_instance( # If we have keyword arguments corresponding to fields on the # request, apply these. + if name is not None: request.name = name @@ -734,7 +754,7 @@ async def delete_instance( response = operation_async.from_gapic( response, self._client._transport.operations_client, - empty_pb2.Empty, + empty.Empty, metadata_type=cloud_memcache.OperationMetadata, ) @@ -786,6 +806,7 @@ async def apply_parameters( This corresponds to the ``apply_all`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -814,10 +835,12 @@ async def apply_parameters( # If we have keyword arguments corresponding to fields on the # request, apply these. + if name is not None: request.name = name if apply_all is not None: request.apply_all = apply_all + if node_ids: request.node_ids.extend(node_ids) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py index 3152eee..591ee49 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,6 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # + from collections import OrderedDict from distutils import util import os @@ -21,10 +23,10 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore +from google.auth import credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore @@ -34,9 +36,10 @@ from google.api_core import operation_async # type: ignore from google.cloud.memcache_v1.services.cloud_memcache import pagers from google.cloud.memcache_v1.types import cloud_memcache -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import 
empty_pb2 as empty # type: ignore +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + from .transports.base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO from .transports.grpc import CloudMemcacheGrpcTransport from .transports.grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport @@ -55,7 +58,7 @@ class CloudMemcacheClientMeta(type): _transport_registry["grpc_asyncio"] = CloudMemcacheGrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[CloudMemcacheTransport]: - """Returns an appropriate transport class. + """Return an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -97,8 +100,7 @@ class CloudMemcacheClient(metaclass=CloudMemcacheClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - + """Convert api endpoint to mTLS endpoint. Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -132,8 +134,7 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. + """Creates an instance of this client using the provided credentials info. Args: info (dict): The service account private key info. @@ -150,7 +151,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -169,24 +170,23 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> CloudMemcacheTransport: - """Returns the transport used by the client instance. 
+ """Return the transport used by the client instance. Returns: - CloudMemcacheTransport: The transport used by the client - instance. + CloudMemcacheTransport: The transport used by the client instance. """ return self._transport @staticmethod def instance_path(project: str, location: str, instance: str,) -> str: - """Returns a fully-qualified instance string.""" + """Return a fully-qualified instance string.""" return "projects/{project}/locations/{location}/instances/{instance}".format( project=project, location=location, instance=instance, ) @staticmethod def parse_instance_path(path: str) -> Dict[str, str]: - """Parses a instance path into its component segments.""" + """Parse a instance path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$", path, @@ -195,7 +195,7 @@ def parse_instance_path(path: str) -> Dict[str, str]: @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Returns a fully-qualified billing_account string.""" + """Return a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -208,7 +208,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Returns a fully-qualified folder string.""" + """Return a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -219,7 +219,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Returns a fully-qualified organization string.""" + """Return a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -230,7 +230,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Returns a fully-qualified 
project string.""" + """Return a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -241,7 +241,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Returns a fully-qualified location string.""" + """Return a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -255,12 +255,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[ga_credentials.Credentials] = None, + credentials: Optional[credentials.Credentials] = None, transport: Union[str, CloudMemcacheTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the cloud memcache client. + """Instantiate the cloud memcache client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -315,10 +315,9 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None + client_cert_source_func = ( + mtls.default_client_cert_source() if is_mtls else None + ) # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -330,14 +329,12 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT + api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT + ) else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. 
Accepted " - "values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" ) # Save or instantiate the transport. @@ -352,8 +349,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." + "When providing a transport instance, " + "provide its scopes directly." ) self._transport = transport else: @@ -392,6 +389,7 @@ def list_instances( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -423,8 +421,10 @@ def list_instances( # there are no flattened fields. if not isinstance(request, cloud_memcache.ListInstancesRequest): request = cloud_memcache.ListInstancesRequest(request) + # If we have keyword arguments corresponding to fields on the # request, apply these. + if parent is not None: request.parent = parent @@ -474,6 +474,7 @@ def get_instance( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -500,8 +501,10 @@ def get_instance( # there are no flattened fields. if not isinstance(request, cloud_memcache.GetInstanceRequest): request = cloud_memcache.GetInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the # request, apply these. + if name is not None: request.name = name @@ -569,6 +572,7 @@ def create_instance( This corresponds to the ``instance_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -599,8 +603,10 @@ def create_instance( # there are no flattened fields. if not isinstance(request, cloud_memcache.CreateInstanceRequest): request = cloud_memcache.CreateInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the # request, apply these. + if parent is not None: request.parent = parent if instance is not None: @@ -637,7 +643,7 @@ def update_instance( request: cloud_memcache.UpdateInstanceRequest = None, *, instance: cloud_memcache.Instance = None, - update_mask: field_mask_pb2.FieldMask = None, + update_mask: field_mask.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -664,6 +670,7 @@ def update_instance( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -694,8 +701,10 @@ def update_instance( # there are no flattened fields. if not isinstance(request, cloud_memcache.UpdateInstanceRequest): request = cloud_memcache.UpdateInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the # request, apply these. + if instance is not None: request.instance = instance if update_mask is not None: @@ -732,7 +741,7 @@ def update_parameters( request: cloud_memcache.UpdateParametersRequest = None, *, name: str = None, - update_mask: field_mask_pb2.FieldMask = None, + update_mask: field_mask.FieldMask = None, parameters: cloud_memcache.MemcacheParameters = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -767,6 +776,7 @@ def update_parameters( This corresponds to the ``parameters`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. @@ -797,8 +807,10 @@ def update_parameters( # there are no flattened fields. if not isinstance(request, cloud_memcache.UpdateParametersRequest): request = cloud_memcache.UpdateParametersRequest(request) + # If we have keyword arguments corresponding to fields on the # request, apply these. + if name is not None: request.name = name if update_mask is not None: @@ -854,6 +866,7 @@ def delete_instance( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -895,8 +908,10 @@ def delete_instance( # there are no flattened fields. if not isinstance(request, cloud_memcache.DeleteInstanceRequest): request = cloud_memcache.DeleteInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the # request, apply these. + if name is not None: request.name = name @@ -917,7 +932,7 @@ def delete_instance( response = operation.from_gapic( response, self._transport.operations_client, - empty_pb2.Empty, + empty.Empty, metadata_type=cloud_memcache.OperationMetadata, ) @@ -969,6 +984,7 @@ def apply_parameters( This corresponds to the ``apply_all`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -999,8 +1015,10 @@ def apply_parameters( # there are no flattened fields. if not isinstance(request, cloud_memcache.ApplyParametersRequest): request = cloud_memcache.ApplyParametersRequest(request) + # If we have keyword arguments corresponding to fields on the # request, apply these. 
+ if name is not None: request.name = name if node_ids is not None: diff --git a/google/cloud/memcache_v1/services/cloud_memcache/pagers.py b/google/cloud/memcache_v1/services/cloud_memcache/pagers.py index 7723778..7a1324e 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/pagers.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/pagers.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,6 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # + from typing import ( Any, AsyncIterable, @@ -115,7 +117,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiates the pager. + """Instantiate the pager. Args: method (Callable): The method that was originally called, and diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py index 32ad848..38122c6 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,6 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# + from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py index 393f376..c9b57c7 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,21 +14,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # + import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version +import typing import pkg_resources -import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore +from google import auth # type: ignore +from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.api_core import operations_v1 # type: ignore -from google.auth import credentials as ga_credentials # type: ignore +from google.auth import credentials # type: ignore from google.cloud.memcache_v1.types import cloud_memcache -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 as operations # type: ignore + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -36,41 +37,27 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except 
pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - -_API_CORE_VERSION = google.api_core.__version__ - class CloudMemcacheTransport(abc.ABC): """Abstract transport class for CloudMemcache.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - DEFAULT_HOST: str = "memcache.googleapis.com" - def __init__( self, *, - host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, + host: str = "memcache.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: typing.Optional[str] = None, + scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, + quota_project_id: typing.Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): - The hostname to connect to. + host (Optional[str]): The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -79,7 +66,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. + scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -93,76 +80,29 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) - # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. 
if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs( + raise exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + credentials, _ = auth.load_credentials_from_file( + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = google.auth.default( - **scopes_kwargs, quota_project_id=quota_project_id + credentials, _ = auth.default( + scopes=self._scopes, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # TODO(busunkim): These two class methods are in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-api-core - # and google-auth are increased. 
- - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - - # TODO: Remove this function once google-api-core >= 1.26.0 is required - @classmethod - def _get_self_signed_jwt_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Union[Optional[Sequence[str]], str]]: - """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" - - self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} - - if _API_CORE_VERSION and ( - packaging.version.parse(_API_CORE_VERSION) - >= packaging.version.parse("1.26.0") - ): - self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES - self_signed_jwt_kwargs["scopes"] = scopes - self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST - else: - self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES - - return self_signed_jwt_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -197,11 +137,11 @@ def operations_client(self) -> operations_v1.OperationsClient: @property def list_instances( self, - ) -> Callable[ + ) -> typing.Callable[ [cloud_memcache.ListInstancesRequest], - Union[ + typing.Union[ cloud_memcache.ListInstancesResponse, - Awaitable[cloud_memcache.ListInstancesResponse], + typing.Awaitable[cloud_memcache.ListInstancesResponse], ], ]: raise NotImplementedError() @@ -209,54 +149,56 @@ def list_instances( @property def get_instance( self, - ) -> Callable[ + ) -> typing.Callable[ [cloud_memcache.GetInstanceRequest], - Union[cloud_memcache.Instance, Awaitable[cloud_memcache.Instance]], + typing.Union[ + cloud_memcache.Instance, typing.Awaitable[cloud_memcache.Instance] + ], ]: raise NotImplementedError() @property def create_instance( self, - ) -> Callable[ + ) -> typing.Callable[ [cloud_memcache.CreateInstanceRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], ]: raise NotImplementedError() @property def update_instance( self, - ) -> Callable[ + ) -> typing.Callable[ [cloud_memcache.UpdateInstanceRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], ]: raise NotImplementedError() @property def update_parameters( self, - ) -> Callable[ + ) -> typing.Callable[ [cloud_memcache.UpdateParametersRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], ]: raise NotImplementedError() @property def delete_instance( self, - ) -> Callable[ + ) -> typing.Callable[ [cloud_memcache.DeleteInstanceRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], ]: raise NotImplementedError() @property def 
apply_parameters( self, - ) -> Callable[ + ) -> typing.Callable[ [cloud_memcache.ApplyParametersRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], ]: raise NotImplementedError() diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py index 5467640..61c7dde 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,20 +14,22 @@ # See the License for the specific language governing permissions and # limitations under the License. # + import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union +from typing import Callable, Dict, Optional, Sequence, Tuple from google.api_core import grpc_helpers # type: ignore from google.api_core import operations_v1 # type: ignore from google.api_core import gapic_v1 # type: ignore -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.memcache_v1.types import cloud_memcache -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 as operations # type: ignore + from .base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO @@ -67,7 +70,7 @@ def __init__( self, *, host: str = "memcache.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: 
grpc.Channel = None, @@ -81,8 +84,7 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): - The hostname to connect to. + host (Optional[str]): The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -193,7 +195,7 @@ def __init__( def create_channel( cls, host: str = "memcache.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -224,15 +226,13 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ - - self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) - + scopes = scopes or cls.AUTH_SCOPES return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, + scopes=scopes, quota_project_id=quota_project_id, - **self_signed_jwt_kwargs, **kwargs, ) @@ -313,7 +313,7 @@ def get_instance( @property def create_instance( self, - ) -> Callable[[cloud_memcache.CreateInstanceRequest], operations_pb2.Operation]: + ) -> Callable[[cloud_memcache.CreateInstanceRequest], operations.Operation]: r"""Return a callable for the create instance method over gRPC. Creates a new Instance in a given location. 
@@ -332,14 +332,14 @@ def create_instance( self._stubs["create_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/CreateInstance", request_serializer=cloud_memcache.CreateInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + response_deserializer=operations.Operation.FromString, ) return self._stubs["create_instance"] @property def update_instance( self, - ) -> Callable[[cloud_memcache.UpdateInstanceRequest], operations_pb2.Operation]: + ) -> Callable[[cloud_memcache.UpdateInstanceRequest], operations.Operation]: r"""Return a callable for the update instance method over gRPC. Updates an existing Instance in a given project and @@ -359,14 +359,14 @@ def update_instance( self._stubs["update_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/UpdateInstance", request_serializer=cloud_memcache.UpdateInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + response_deserializer=operations.Operation.FromString, ) return self._stubs["update_instance"] @property def update_parameters( self, - ) -> Callable[[cloud_memcache.UpdateParametersRequest], operations_pb2.Operation]: + ) -> Callable[[cloud_memcache.UpdateParametersRequest], operations.Operation]: r"""Return a callable for the update parameters method over gRPC. 
Updates the defined Memcached Parameters for an @@ -388,14 +388,14 @@ def update_parameters( self._stubs["update_parameters"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/UpdateParameters", request_serializer=cloud_memcache.UpdateParametersRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + response_deserializer=operations.Operation.FromString, ) return self._stubs["update_parameters"] @property def delete_instance( self, - ) -> Callable[[cloud_memcache.DeleteInstanceRequest], operations_pb2.Operation]: + ) -> Callable[[cloud_memcache.DeleteInstanceRequest], operations.Operation]: r"""Return a callable for the delete instance method over gRPC. Deletes a single Instance. @@ -414,14 +414,14 @@ def delete_instance( self._stubs["delete_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/DeleteInstance", request_serializer=cloud_memcache.DeleteInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + response_deserializer=operations.Operation.FromString, ) return self._stubs["delete_instance"] @property def apply_parameters( self, - ) -> Callable[[cloud_memcache.ApplyParametersRequest], operations_pb2.Operation]: + ) -> Callable[[cloud_memcache.ApplyParametersRequest], operations.Operation]: r"""Return a callable for the apply parameters method over gRPC. 
ApplyParameters will restart the set of specified @@ -442,7 +442,7 @@ def apply_parameters( self._stubs["apply_parameters"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/ApplyParameters", request_serializer=cloud_memcache.ApplyParametersRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + response_deserializer=operations.Operation.FromString, ) return self._stubs["apply_parameters"] diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py index b21cfd3..d669536 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,21 +14,23 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# + import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore from google.api_core import operations_v1 # type: ignore -from google.auth import credentials as ga_credentials # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.memcache_v1.types import cloud_memcache -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 as operations # type: ignore + from .base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO from .grpc import CloudMemcacheGrpcTransport @@ -70,7 +73,7 @@ class CloudMemcacheGrpcAsyncIOTransport(CloudMemcacheTransport): def create_channel( cls, host: str = "memcache.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -97,15 +100,13 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. 
""" - - self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) - + scopes = scopes or cls.AUTH_SCOPES return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, + scopes=scopes, quota_project_id=quota_project_id, - **self_signed_jwt_kwargs, **kwargs, ) @@ -113,7 +114,7 @@ def __init__( self, *, host: str = "memcache.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -127,8 +128,7 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): - The hostname to connect to. + host (Optional[str]): The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -187,6 +187,7 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None + else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -322,7 +323,7 @@ def get_instance( def create_instance( self, ) -> Callable[ - [cloud_memcache.CreateInstanceRequest], Awaitable[operations_pb2.Operation] + [cloud_memcache.CreateInstanceRequest], Awaitable[operations.Operation] ]: r"""Return a callable for the create instance method over gRPC. 
@@ -342,7 +343,7 @@ def create_instance( self._stubs["create_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/CreateInstance", request_serializer=cloud_memcache.CreateInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + response_deserializer=operations.Operation.FromString, ) return self._stubs["create_instance"] @@ -350,7 +351,7 @@ def create_instance( def update_instance( self, ) -> Callable[ - [cloud_memcache.UpdateInstanceRequest], Awaitable[operations_pb2.Operation] + [cloud_memcache.UpdateInstanceRequest], Awaitable[operations.Operation] ]: r"""Return a callable for the update instance method over gRPC. @@ -371,7 +372,7 @@ def update_instance( self._stubs["update_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/UpdateInstance", request_serializer=cloud_memcache.UpdateInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + response_deserializer=operations.Operation.FromString, ) return self._stubs["update_instance"] @@ -379,7 +380,7 @@ def update_instance( def update_parameters( self, ) -> Callable[ - [cloud_memcache.UpdateParametersRequest], Awaitable[operations_pb2.Operation] + [cloud_memcache.UpdateParametersRequest], Awaitable[operations.Operation] ]: r"""Return a callable for the update parameters method over gRPC. 
@@ -402,7 +403,7 @@ def update_parameters( self._stubs["update_parameters"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/UpdateParameters", request_serializer=cloud_memcache.UpdateParametersRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + response_deserializer=operations.Operation.FromString, ) return self._stubs["update_parameters"] @@ -410,7 +411,7 @@ def update_parameters( def delete_instance( self, ) -> Callable[ - [cloud_memcache.DeleteInstanceRequest], Awaitable[operations_pb2.Operation] + [cloud_memcache.DeleteInstanceRequest], Awaitable[operations.Operation] ]: r"""Return a callable for the delete instance method over gRPC. @@ -430,7 +431,7 @@ def delete_instance( self._stubs["delete_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/DeleteInstance", request_serializer=cloud_memcache.DeleteInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + response_deserializer=operations.Operation.FromString, ) return self._stubs["delete_instance"] @@ -438,7 +439,7 @@ def delete_instance( def apply_parameters( self, ) -> Callable[ - [cloud_memcache.ApplyParametersRequest], Awaitable[operations_pb2.Operation] + [cloud_memcache.ApplyParametersRequest], Awaitable[operations.Operation] ]: r"""Return a callable for the apply parameters method over gRPC. 
@@ -460,7 +461,7 @@ def apply_parameters( self._stubs["apply_parameters"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/ApplyParameters", request_serializer=cloud_memcache.ApplyParametersRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + response_deserializer=operations.Operation.FromString, ) return self._stubs["apply_parameters"] diff --git a/google/cloud/memcache_v1/types/__init__.py b/google/cloud/memcache_v1/types/__init__.py index 2430991..29acce6 100644 --- a/google/cloud/memcache_v1/types/__init__.py +++ b/google/cloud/memcache_v1/types/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,6 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # + from .cloud_memcache import ( ApplyParametersRequest, CreateInstanceRequest, diff --git a/google/cloud/memcache_v1/types/cloud_memcache.py b/google/cloud/memcache_v1/types/cloud_memcache.py index 2a6777e..a148657 100644 --- a/google/cloud/memcache_v1/types/cloud_memcache.py +++ b/google/cloud/memcache_v1/types/cloud_memcache.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,10 +14,12 @@ # See the License for the specific language governing permissions and # limitations under the License. # + import proto # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore + +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore __protobuf__ = proto.module( @@ -46,6 +49,7 @@ class MemcacheVersion(proto.Enum): class Instance(proto.Message): r""" + Attributes: name (str): Required. 
Unique name of the resource in this scope @@ -129,6 +133,7 @@ class State(proto.Enum): class NodeConfig(proto.Message): r"""Configuration for a Memcached Node. + Attributes: cpu_count (int): Required. Number of cpus per Memcached node. @@ -137,11 +142,13 @@ class NodeConfig(proto.Message): Memcached node. """ - cpu_count = proto.Field(proto.INT32, number=1,) - memory_size_mb = proto.Field(proto.INT32, number=2,) + cpu_count = proto.Field(proto.INT32, number=1) + + memory_size_mb = proto.Field(proto.INT32, number=2) class Node(proto.Message): r""" + Attributes: node_id (str): Output only. Identifier of the Memcached @@ -173,15 +180,21 @@ class State(proto.Enum): DELETING = 3 UPDATING = 4 - node_id = proto.Field(proto.STRING, number=1,) - zone = proto.Field(proto.STRING, number=2,) + node_id = proto.Field(proto.STRING, number=1) + + zone = proto.Field(proto.STRING, number=2) + state = proto.Field(proto.ENUM, number=3, enum="Instance.Node.State",) - host = proto.Field(proto.STRING, number=4,) - port = proto.Field(proto.INT32, number=5,) + + host = proto.Field(proto.STRING, number=4) + + port = proto.Field(proto.INT32, number=5) + parameters = proto.Field(proto.MESSAGE, number=6, message="MemcacheParameters",) class InstanceMessage(proto.Message): r""" + Attributes: code (google.cloud.memcache_v1.types.Instance.InstanceMessage.Code): A code that correspond to one type of user- @@ -197,30 +210,42 @@ class Code(proto.Enum): ZONE_DISTRIBUTION_UNBALANCED = 1 code = proto.Field(proto.ENUM, number=1, enum="Instance.InstanceMessage.Code",) - message = proto.Field(proto.STRING, number=2,) - - name = proto.Field(proto.STRING, number=1,) - display_name = proto.Field(proto.STRING, number=2,) - labels = proto.MapField(proto.STRING, proto.STRING, number=3,) - authorized_network = proto.Field(proto.STRING, number=4,) - zones = proto.RepeatedField(proto.STRING, number=5,) - node_count = proto.Field(proto.INT32, number=6,) + + message = proto.Field(proto.STRING, number=2) + + name = 
proto.Field(proto.STRING, number=1) + + display_name = proto.Field(proto.STRING, number=2) + + labels = proto.MapField(proto.STRING, proto.STRING, number=3) + + authorized_network = proto.Field(proto.STRING, number=4) + + zones = proto.RepeatedField(proto.STRING, number=5) + + node_count = proto.Field(proto.INT32, number=6) + node_config = proto.Field(proto.MESSAGE, number=7, message=NodeConfig,) + memcache_version = proto.Field(proto.ENUM, number=9, enum="MemcacheVersion",) + parameters = proto.Field(proto.MESSAGE, number=11, message="MemcacheParameters",) + memcache_nodes = proto.RepeatedField(proto.MESSAGE, number=12, message=Node,) - create_time = proto.Field( - proto.MESSAGE, number=13, message=timestamp_pb2.Timestamp, - ) - update_time = proto.Field( - proto.MESSAGE, number=14, message=timestamp_pb2.Timestamp, - ) + + create_time = proto.Field(proto.MESSAGE, number=13, message=timestamp.Timestamp,) + + update_time = proto.Field(proto.MESSAGE, number=14, message=timestamp.Timestamp,) + state = proto.Field(proto.ENUM, number=15, enum=State,) - memcache_full_version = proto.Field(proto.STRING, number=18,) + + memcache_full_version = proto.Field(proto.STRING, number=18) + instance_messages = proto.RepeatedField( proto.MESSAGE, number=19, message=InstanceMessage, ) - discovery_endpoint = proto.Field(proto.STRING, number=20,) + + discovery_endpoint = proto.Field(proto.STRING, number=20) class ListInstancesRequest(proto.Message): @@ -253,11 +278,15 @@ class ListInstancesRequest(proto.Message): "name desc" or "" (unsorted). 
""" - parent = proto.Field(proto.STRING, number=1,) - page_size = proto.Field(proto.INT32, number=2,) - page_token = proto.Field(proto.STRING, number=3,) - filter = proto.Field(proto.STRING, number=4,) - order_by = proto.Field(proto.STRING, number=5,) + parent = proto.Field(proto.STRING, number=1) + + page_size = proto.Field(proto.INT32, number=2) + + page_token = proto.Field(proto.STRING, number=3) + + filter = proto.Field(proto.STRING, number=4) + + order_by = proto.Field(proto.STRING, number=5) class ListInstancesResponse(proto.Message): @@ -285,8 +314,10 @@ def raw_page(self): return self instances = proto.RepeatedField(proto.MESSAGE, number=1, message="Instance",) - next_page_token = proto.Field(proto.STRING, number=2,) - unreachable = proto.RepeatedField(proto.STRING, number=3,) + + next_page_token = proto.Field(proto.STRING, number=2) + + unreachable = proto.RepeatedField(proto.STRING, number=3) class GetInstanceRequest(proto.Message): @@ -300,7 +331,7 @@ class GetInstanceRequest(proto.Message): where ``location_id`` refers to a GCP region """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field(proto.STRING, number=1) class CreateInstanceRequest(proto.Message): @@ -329,8 +360,10 @@ class CreateInstanceRequest(proto.Message): Required. A Memcached Instance """ - parent = proto.Field(proto.STRING, number=1,) - instance_id = proto.Field(proto.STRING, number=2,) + parent = proto.Field(proto.STRING, number=1) + + instance_id = proto.Field(proto.STRING, number=2) + instance = proto.Field(proto.MESSAGE, number=3, message="Instance",) @@ -348,9 +381,8 @@ class UpdateInstanceRequest(proto.Message): update_mask are updated. 
""" - update_mask = proto.Field( - proto.MESSAGE, number=1, message=field_mask_pb2.FieldMask, - ) + update_mask = proto.Field(proto.MESSAGE, number=1, message=field_mask.FieldMask,) + instance = proto.Field(proto.MESSAGE, number=2, message="Instance",) @@ -365,7 +397,7 @@ class DeleteInstanceRequest(proto.Message): where ``location_id`` refers to a GCP region """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field(proto.STRING, number=1) class ApplyParametersRequest(proto.Message): @@ -388,9 +420,11 @@ class ApplyParametersRequest(proto.Message): nodes within the instance. """ - name = proto.Field(proto.STRING, number=1,) - node_ids = proto.RepeatedField(proto.STRING, number=2,) - apply_all = proto.Field(proto.BOOL, number=3,) + name = proto.Field(proto.STRING, number=1) + + node_ids = proto.RepeatedField(proto.STRING, number=2) + + apply_all = proto.Field(proto.BOOL, number=3) class UpdateParametersRequest(proto.Message): @@ -408,15 +442,16 @@ class UpdateParametersRequest(proto.Message): The parameters to apply to the instance. """ - name = proto.Field(proto.STRING, number=1,) - update_mask = proto.Field( - proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, - ) + name = proto.Field(proto.STRING, number=1) + + update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + parameters = proto.Field(proto.MESSAGE, number=3, message="MemcacheParameters",) class MemcacheParameters(proto.Message): r""" + Attributes: id (str): Output only. The unique ID associated with @@ -430,12 +465,14 @@ class MemcacheParameters(proto.Message): memcached process. """ - id = proto.Field(proto.STRING, number=1,) - params = proto.MapField(proto.STRING, proto.STRING, number=3,) + id = proto.Field(proto.STRING, number=1) + + params = proto.MapField(proto.STRING, proto.STRING, number=3) class OperationMetadata(proto.Message): r"""Represents the metadata of a long-running operation. 
+ Attributes: create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Time when the operation was @@ -463,13 +500,19 @@ class OperationMetadata(proto.Message): operation. """ - create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) - target = proto.Field(proto.STRING, number=3,) - verb = proto.Field(proto.STRING, number=4,) - status_detail = proto.Field(proto.STRING, number=5,) - cancel_requested = proto.Field(proto.BOOL, number=6,) - api_version = proto.Field(proto.STRING, number=7,) + create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) + + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) + + target = proto.Field(proto.STRING, number=3) + + verb = proto.Field(proto.STRING, number=4) + + status_detail = proto.Field(proto.STRING, number=5) + + cancel_requested = proto.Field(proto.BOOL, number=6) + + api_version = proto.Field(proto.STRING, number=7) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/memcache_v1beta2/__init__.py b/google/cloud/memcache_v1beta2/__init__.py index 54fa8a3..bac2393 100644 --- a/google/cloud/memcache_v1beta2/__init__.py +++ b/google/cloud/memcache_v1beta2/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,8 +16,6 @@ # from .services.cloud_memcache import CloudMemcacheClient -from .services.cloud_memcache import CloudMemcacheAsyncClient - from .types.cloud_memcache import ApplyParametersRequest from .types.cloud_memcache import ApplySoftwareUpdateRequest from .types.cloud_memcache import CreateInstanceRequest @@ -27,17 +26,16 @@ from .types.cloud_memcache import ListInstancesResponse from .types.cloud_memcache import LocationMetadata from .types.cloud_memcache import MemcacheParameters +from .types.cloud_memcache import 
MemcacheVersion from .types.cloud_memcache import OperationMetadata from .types.cloud_memcache import UpdateInstanceRequest from .types.cloud_memcache import UpdateParametersRequest from .types.cloud_memcache import ZoneMetadata -from .types.cloud_memcache import MemcacheVersion + __all__ = ( - "CloudMemcacheAsyncClient", "ApplyParametersRequest", "ApplySoftwareUpdateRequest", - "CloudMemcacheClient", "CreateInstanceRequest", "DeleteInstanceRequest", "GetInstanceRequest", @@ -51,4 +49,5 @@ "UpdateInstanceRequest", "UpdateParametersRequest", "ZoneMetadata", + "CloudMemcacheClient", ) diff --git a/google/cloud/memcache_v1beta2/gapic_metadata.json b/google/cloud/memcache_v1beta2/gapic_metadata.json deleted file mode 100644 index 288ef89..0000000 --- a/google/cloud/memcache_v1beta2/gapic_metadata.json +++ /dev/null @@ -1,103 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.memcache_v1beta2", - "protoPackage": "google.cloud.memcache.v1beta2", - "schema": "1.0", - "services": { - "CloudMemcache": { - "clients": { - "grpc": { - "libraryClient": "CloudMemcacheClient", - "rpcs": { - "ApplyParameters": { - "methods": [ - "apply_parameters" - ] - }, - "ApplySoftwareUpdate": { - "methods": [ - "apply_software_update" - ] - }, - "CreateInstance": { - "methods": [ - "create_instance" - ] - }, - "DeleteInstance": { - "methods": [ - "delete_instance" - ] - }, - "GetInstance": { - "methods": [ - "get_instance" - ] - }, - "ListInstances": { - "methods": [ - "list_instances" - ] - }, - "UpdateInstance": { - "methods": [ - "update_instance" - ] - }, - "UpdateParameters": { - "methods": [ - "update_parameters" - ] - } - } - }, - "grpc-async": { - "libraryClient": "CloudMemcacheAsyncClient", - "rpcs": { - "ApplyParameters": { - "methods": [ - "apply_parameters" - ] - }, - "ApplySoftwareUpdate": { - "methods": [ - "apply_software_update" - ] - }, - "CreateInstance": { - 
"methods": [ - "create_instance" - ] - }, - "DeleteInstance": { - "methods": [ - "delete_instance" - ] - }, - "GetInstance": { - "methods": [ - "get_instance" - ] - }, - "ListInstances": { - "methods": [ - "list_instances" - ] - }, - "UpdateInstance": { - "methods": [ - "update_instance" - ] - }, - "UpdateParameters": { - "methods": [ - "update_parameters" - ] - } - } - } - } - } - } -} diff --git a/google/cloud/memcache_v1beta2/services/__init__.py b/google/cloud/memcache_v1beta2/services/__init__.py index 4de6597..42ffdf2 100644 --- a/google/cloud/memcache_v1beta2/services/__init__.py +++ b/google/cloud/memcache_v1beta2/services/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py index efb245e..8524cb4 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,6 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# + from .client import CloudMemcacheClient from .async_client import CloudMemcacheAsyncClient diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py index 4417473..b687488 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,6 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # + from collections import OrderedDict import functools import re @@ -20,19 +22,20 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore +from google.auth import credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.memcache_v1beta2.services.cloud_memcache import pagers from google.cloud.memcache_v1beta2.types import cloud_memcache -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + from .transports.base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO from 
.transports.grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport from .client import CloudMemcacheClient @@ -67,26 +70,31 @@ class CloudMemcacheAsyncClient: instance_path = staticmethod(CloudMemcacheClient.instance_path) parse_instance_path = staticmethod(CloudMemcacheClient.parse_instance_path) + common_billing_account_path = staticmethod( CloudMemcacheClient.common_billing_account_path ) parse_common_billing_account_path = staticmethod( CloudMemcacheClient.parse_common_billing_account_path ) + common_folder_path = staticmethod(CloudMemcacheClient.common_folder_path) parse_common_folder_path = staticmethod( CloudMemcacheClient.parse_common_folder_path ) + common_organization_path = staticmethod( CloudMemcacheClient.common_organization_path ) parse_common_organization_path = staticmethod( CloudMemcacheClient.parse_common_organization_path ) + common_project_path = staticmethod(CloudMemcacheClient.common_project_path) parse_common_project_path = staticmethod( CloudMemcacheClient.parse_common_project_path ) + common_location_path = staticmethod(CloudMemcacheClient.common_location_path) parse_common_location_path = staticmethod( CloudMemcacheClient.parse_common_location_path @@ -94,8 +102,7 @@ class CloudMemcacheAsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. + """Creates an instance of this client using the provided credentials info. Args: info (dict): The service account private key info. @@ -110,7 +117,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. 
Args: filename (str): The path to the service account private key json @@ -127,7 +134,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> CloudMemcacheTransport: - """Returns the transport used by the client instance. + """Return the transport used by the client instance. Returns: CloudMemcacheTransport: The transport used by the client instance. @@ -141,12 +148,12 @@ def transport(self) -> CloudMemcacheTransport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: credentials.Credentials = None, transport: Union[str, CloudMemcacheTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the cloud memcache client. + """Instantiate the cloud memcache client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -178,6 +185,7 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ + self._client = CloudMemcacheClient( credentials=credentials, transport=transport, @@ -209,6 +217,7 @@ async def list_instances( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -238,6 +247,7 @@ async def list_instances( # If we have keyword arguments corresponding to fields on the # request, apply these. + if parent is not None: request.parent = parent @@ -291,6 +301,7 @@ async def get_instance( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -315,6 +326,7 @@ async def get_instance( # If we have keyword arguments corresponding to fields on the # request, apply these. + if name is not None: request.name = name @@ -386,6 +398,7 @@ async def create_instance( This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -415,6 +428,7 @@ async def create_instance( # If we have keyword arguments corresponding to fields on the # request, apply these. + if parent is not None: request.parent = parent if instance_id is not None: @@ -454,7 +468,7 @@ async def update_instance( self, request: cloud_memcache.UpdateInstanceRequest = None, *, - update_mask: field_mask_pb2.FieldMask = None, + update_mask: field_mask.FieldMask = None, resource: cloud_memcache.Instance = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -482,6 +496,7 @@ async def update_instance( This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -511,6 +526,7 @@ async def update_instance( # If we have keyword arguments corresponding to fields on the # request, apply these. 
+ if update_mask is not None: request.update_mask = update_mask if resource is not None: @@ -551,7 +567,7 @@ async def update_parameters( request: cloud_memcache.UpdateParametersRequest = None, *, name: str = None, - update_mask: field_mask_pb2.FieldMask = None, + update_mask: field_mask.FieldMask = None, parameters: cloud_memcache.MemcacheParameters = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -586,6 +602,7 @@ async def update_parameters( This corresponds to the ``parameters`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -615,6 +632,7 @@ async def update_parameters( # If we have keyword arguments corresponding to fields on the # request, apply these. + if name is not None: request.name = name if update_mask is not None: @@ -674,6 +692,7 @@ async def delete_instance( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -713,6 +732,7 @@ async def delete_instance( # If we have keyword arguments corresponding to fields on the # request, apply these. + if name is not None: request.name = name @@ -737,7 +757,7 @@ async def delete_instance( response = operation_async.from_gapic( response, self._client._transport.operations_client, - empty_pb2.Empty, + empty.Empty, metadata_type=cloud_memcache.OperationMetadata, ) @@ -787,6 +807,7 @@ async def apply_parameters( This corresponds to the ``apply_all`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -816,10 +837,12 @@ async def apply_parameters( # If we have keyword arguments corresponding to fields on the # request, apply these. + if name is not None: request.name = name if apply_all is not None: request.apply_all = apply_all + if node_ids: request.node_ids.extend(node_ids) @@ -896,6 +919,7 @@ async def apply_software_update( This corresponds to the ``apply_all`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -925,10 +949,12 @@ async def apply_software_update( # If we have keyword arguments corresponding to fields on the # request, apply these. + if instance is not None: request.instance = instance if apply_all is not None: request.apply_all = apply_all + if node_ids: request.node_ids.extend(node_ids) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index 65a2bc3..8d43719 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,6 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# + from collections import OrderedDict from distutils import util import os @@ -21,10 +23,10 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore +from google.auth import credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore @@ -34,9 +36,10 @@ from google.api_core import operation_async # type: ignore from google.cloud.memcache_v1beta2.services.cloud_memcache import pagers from google.cloud.memcache_v1beta2.types import cloud_memcache -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + from .transports.base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO from .transports.grpc import CloudMemcacheGrpcTransport from .transports.grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport @@ -55,7 +58,7 @@ class CloudMemcacheClientMeta(type): _transport_registry["grpc_asyncio"] = CloudMemcacheGrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[CloudMemcacheTransport]: - """Returns an appropriate transport class. + """Return an appropriate transport class. Args: label: The name of the desired transport. 
If none is @@ -97,8 +100,7 @@ class CloudMemcacheClient(metaclass=CloudMemcacheClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - + """Convert api endpoint to mTLS endpoint. Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -132,8 +134,7 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. + """Creates an instance of this client using the provided credentials info. Args: info (dict): The service account private key info. @@ -150,7 +151,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -169,24 +170,23 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> CloudMemcacheTransport: - """Returns the transport used by the client instance. + """Return the transport used by the client instance. Returns: - CloudMemcacheTransport: The transport used by the client - instance. + CloudMemcacheTransport: The transport used by the client instance. 
""" return self._transport @staticmethod def instance_path(project: str, location: str, instance: str,) -> str: - """Returns a fully-qualified instance string.""" + """Return a fully-qualified instance string.""" return "projects/{project}/locations/{location}/instances/{instance}".format( project=project, location=location, instance=instance, ) @staticmethod def parse_instance_path(path: str) -> Dict[str, str]: - """Parses a instance path into its component segments.""" + """Parse a instance path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$", path, @@ -195,7 +195,7 @@ def parse_instance_path(path: str) -> Dict[str, str]: @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Returns a fully-qualified billing_account string.""" + """Return a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -208,7 +208,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Returns a fully-qualified folder string.""" + """Return a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -219,7 +219,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Returns a fully-qualified organization string.""" + """Return a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -230,7 +230,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Returns a fully-qualified project string.""" + """Return a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -241,7 +241,7 @@ def parse_common_project_path(path: str) -> 
Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Returns a fully-qualified location string.""" + """Return a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -255,12 +255,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[ga_credentials.Credentials] = None, + credentials: Optional[credentials.Credentials] = None, transport: Union[str, CloudMemcacheTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the cloud memcache client. + """Instantiate the cloud memcache client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -315,10 +315,9 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None + client_cert_source_func = ( + mtls.default_client_cert_source() if is_mtls else None + ) # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -330,14 +329,12 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT + api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT + ) else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" ) # Save or instantiate the transport. 
@@ -352,8 +349,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." + "When providing a transport instance, " + "provide its scopes directly." ) self._transport = transport else: @@ -392,6 +389,7 @@ def list_instances( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -423,8 +421,10 @@ def list_instances( # there are no flattened fields. if not isinstance(request, cloud_memcache.ListInstancesRequest): request = cloud_memcache.ListInstancesRequest(request) + # If we have keyword arguments corresponding to fields on the # request, apply these. + if parent is not None: request.parent = parent @@ -474,6 +474,7 @@ def get_instance( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -500,8 +501,10 @@ def get_instance( # there are no flattened fields. if not isinstance(request, cloud_memcache.GetInstanceRequest): request = cloud_memcache.GetInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the # request, apply these. + if name is not None: request.name = name @@ -569,6 +572,7 @@ def create_instance( This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -600,8 +604,10 @@ def create_instance( # there are no flattened fields. 
if not isinstance(request, cloud_memcache.CreateInstanceRequest): request = cloud_memcache.CreateInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the # request, apply these. + if parent is not None: request.parent = parent if instance_id is not None: @@ -637,7 +643,7 @@ def update_instance( self, request: cloud_memcache.UpdateInstanceRequest = None, *, - update_mask: field_mask_pb2.FieldMask = None, + update_mask: field_mask.FieldMask = None, resource: cloud_memcache.Instance = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -665,6 +671,7 @@ def update_instance( This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -696,8 +703,10 @@ def update_instance( # there are no flattened fields. if not isinstance(request, cloud_memcache.UpdateInstanceRequest): request = cloud_memcache.UpdateInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the # request, apply these. + if update_mask is not None: request.update_mask = update_mask if resource is not None: @@ -734,7 +743,7 @@ def update_parameters( request: cloud_memcache.UpdateParametersRequest = None, *, name: str = None, - update_mask: field_mask_pb2.FieldMask = None, + update_mask: field_mask.FieldMask = None, parameters: cloud_memcache.MemcacheParameters = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -769,6 +778,7 @@ def update_parameters( This corresponds to the ``parameters`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -800,8 +810,10 @@ def update_parameters( # there are no flattened fields. 
if not isinstance(request, cloud_memcache.UpdateParametersRequest): request = cloud_memcache.UpdateParametersRequest(request) + # If we have keyword arguments corresponding to fields on the # request, apply these. + if name is not None: request.name = name if update_mask is not None: @@ -857,6 +869,7 @@ def delete_instance( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -898,8 +911,10 @@ def delete_instance( # there are no flattened fields. if not isinstance(request, cloud_memcache.DeleteInstanceRequest): request = cloud_memcache.DeleteInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the # request, apply these. + if name is not None: request.name = name @@ -920,7 +935,7 @@ def delete_instance( response = operation.from_gapic( response, self._transport.operations_client, - empty_pb2.Empty, + empty.Empty, metadata_type=cloud_memcache.OperationMetadata, ) @@ -970,6 +985,7 @@ def apply_parameters( This corresponds to the ``apply_all`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1001,8 +1017,10 @@ def apply_parameters( # there are no flattened fields. if not isinstance(request, cloud_memcache.ApplyParametersRequest): request = cloud_memcache.ApplyParametersRequest(request) + # If we have keyword arguments corresponding to fields on the # request, apply these. + if name is not None: request.name = name if node_ids is not None: @@ -1079,6 +1097,7 @@ def apply_software_update( This corresponds to the ``apply_all`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1110,8 +1129,10 @@ def apply_software_update( # there are no flattened fields. if not isinstance(request, cloud_memcache.ApplySoftwareUpdateRequest): request = cloud_memcache.ApplySoftwareUpdateRequest(request) + # If we have keyword arguments corresponding to fields on the # request, apply these. + if instance is not None: request.instance = instance if node_ids is not None: diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py index 381459d..5b69afd 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,6 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # + from typing import ( Any, AsyncIterable, @@ -115,7 +117,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiates the pager. + """Instantiate the pager. Args: method (Callable): The method that was originally called, and diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py index 32ad848..38122c6 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,6 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# + from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py index 1fb1292..a1b9e2c 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,21 +14,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # + import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version +import typing import pkg_resources -import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore +from google import auth # type: ignore +from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.api_core import operations_v1 # type: ignore -from google.auth import credentials as ga_credentials # type: ignore +from google.auth import credentials # type: ignore from google.cloud.memcache_v1beta2.types import cloud_memcache -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 as operations # type: ignore + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -36,41 +37,27 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - 
except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - -_API_CORE_VERSION = google.api_core.__version__ - class CloudMemcacheTransport(abc.ABC): """Abstract transport class for CloudMemcache.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - DEFAULT_HOST: str = "memcache.googleapis.com" - def __init__( self, *, - host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, + host: str = "memcache.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: typing.Optional[str] = None, + scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, + quota_project_id: typing.Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): - The hostname to connect to. + host (Optional[str]): The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -79,7 +66,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. + scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -93,76 +80,29 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) - # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. 
if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs( + raise exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + credentials, _ = auth.load_credentials_from_file( + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = google.auth.default( - **scopes_kwargs, quota_project_id=quota_project_id + credentials, _ = auth.default( + scopes=self._scopes, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # TODO(busunkim): These two class methods are in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-api-core - # and google-auth are increased. 
- - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - - # TODO: Remove this function once google-api-core >= 1.26.0 is required - @classmethod - def _get_self_signed_jwt_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Union[Optional[Sequence[str]], str]]: - """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" - - self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} - - if _API_CORE_VERSION and ( - packaging.version.parse(_API_CORE_VERSION) - >= packaging.version.parse("1.26.0") - ): - self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES - self_signed_jwt_kwargs["scopes"] = scopes - self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST - else: - self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES - - return self_signed_jwt_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -202,11 +142,11 @@ def operations_client(self) -> operations_v1.OperationsClient: @property def list_instances( self, - ) -> Callable[ + ) -> typing.Callable[ [cloud_memcache.ListInstancesRequest], - Union[ + typing.Union[ cloud_memcache.ListInstancesResponse, - Awaitable[cloud_memcache.ListInstancesResponse], + typing.Awaitable[cloud_memcache.ListInstancesResponse], ], ]: raise NotImplementedError() @@ -214,63 +154,65 @@ def list_instances( @property def get_instance( self, - ) -> Callable[ + ) -> typing.Callable[ [cloud_memcache.GetInstanceRequest], - Union[cloud_memcache.Instance, Awaitable[cloud_memcache.Instance]], + typing.Union[ + cloud_memcache.Instance, typing.Awaitable[cloud_memcache.Instance] + ], ]: raise NotImplementedError() @property def create_instance( self, - ) -> Callable[ + ) -> typing.Callable[ [cloud_memcache.CreateInstanceRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], ]: raise NotImplementedError() @property def update_instance( self, - ) -> Callable[ + ) -> typing.Callable[ [cloud_memcache.UpdateInstanceRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], ]: raise NotImplementedError() @property def update_parameters( self, - ) -> Callable[ + ) -> typing.Callable[ [cloud_memcache.UpdateParametersRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], ]: raise NotImplementedError() @property def delete_instance( self, - ) -> Callable[ + ) -> typing.Callable[ [cloud_memcache.DeleteInstanceRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], ]: raise NotImplementedError() @property def 
apply_parameters( self, - ) -> Callable[ + ) -> typing.Callable[ [cloud_memcache.ApplyParametersRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], ]: raise NotImplementedError() @property def apply_software_update( self, - ) -> Callable[ + ) -> typing.Callable[ [cloud_memcache.ApplySoftwareUpdateRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], ]: raise NotImplementedError() diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py index cf61dee..f5f202f 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,20 +14,22 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# + import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union +from typing import Callable, Dict, Optional, Sequence, Tuple from google.api_core import grpc_helpers # type: ignore from google.api_core import operations_v1 # type: ignore from google.api_core import gapic_v1 # type: ignore -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.memcache_v1beta2.types import cloud_memcache -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 as operations # type: ignore + from .base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO @@ -67,7 +70,7 @@ def __init__( self, *, host: str = "memcache.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -81,8 +84,7 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): - The hostname to connect to. + host (Optional[str]): The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -193,7 +195,7 @@ def __init__( def create_channel( cls, host: str = "memcache.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -224,15 +226,13 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. 
""" - - self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) - + scopes = scopes or cls.AUTH_SCOPES return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, + scopes=scopes, quota_project_id=quota_project_id, - **self_signed_jwt_kwargs, **kwargs, ) @@ -313,7 +313,7 @@ def get_instance( @property def create_instance( self, - ) -> Callable[[cloud_memcache.CreateInstanceRequest], operations_pb2.Operation]: + ) -> Callable[[cloud_memcache.CreateInstanceRequest], operations.Operation]: r"""Return a callable for the create instance method over gRPC. Creates a new Instance in a given location. @@ -332,14 +332,14 @@ def create_instance( self._stubs["create_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/CreateInstance", request_serializer=cloud_memcache.CreateInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + response_deserializer=operations.Operation.FromString, ) return self._stubs["create_instance"] @property def update_instance( self, - ) -> Callable[[cloud_memcache.UpdateInstanceRequest], operations_pb2.Operation]: + ) -> Callable[[cloud_memcache.UpdateInstanceRequest], operations.Operation]: r"""Return a callable for the update instance method over gRPC. 
Updates an existing Instance in a given project and @@ -359,14 +359,14 @@ def update_instance( self._stubs["update_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/UpdateInstance", request_serializer=cloud_memcache.UpdateInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + response_deserializer=operations.Operation.FromString, ) return self._stubs["update_instance"] @property def update_parameters( self, - ) -> Callable[[cloud_memcache.UpdateParametersRequest], operations_pb2.Operation]: + ) -> Callable[[cloud_memcache.UpdateParametersRequest], operations.Operation]: r"""Return a callable for the update parameters method over gRPC. Updates the defined Memcached parameters for an existing @@ -388,14 +388,14 @@ def update_parameters( self._stubs["update_parameters"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/UpdateParameters", request_serializer=cloud_memcache.UpdateParametersRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + response_deserializer=operations.Operation.FromString, ) return self._stubs["update_parameters"] @property def delete_instance( self, - ) -> Callable[[cloud_memcache.DeleteInstanceRequest], operations_pb2.Operation]: + ) -> Callable[[cloud_memcache.DeleteInstanceRequest], operations.Operation]: r"""Return a callable for the delete instance method over gRPC. Deletes a single Instance. 
@@ -414,14 +414,14 @@ def delete_instance( self._stubs["delete_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/DeleteInstance", request_serializer=cloud_memcache.DeleteInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + response_deserializer=operations.Operation.FromString, ) return self._stubs["delete_instance"] @property def apply_parameters( self, - ) -> Callable[[cloud_memcache.ApplyParametersRequest], operations_pb2.Operation]: + ) -> Callable[[cloud_memcache.ApplyParametersRequest], operations.Operation]: r"""Return a callable for the apply parameters method over gRPC. ``ApplyParameters`` restarts the set of specified nodes in order @@ -442,16 +442,14 @@ def apply_parameters( self._stubs["apply_parameters"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/ApplyParameters", request_serializer=cloud_memcache.ApplyParametersRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + response_deserializer=operations.Operation.FromString, ) return self._stubs["apply_parameters"] @property def apply_software_update( self, - ) -> Callable[ - [cloud_memcache.ApplySoftwareUpdateRequest], operations_pb2.Operation - ]: + ) -> Callable[[cloud_memcache.ApplySoftwareUpdateRequest], operations.Operation]: r"""Return a callable for the apply software update method over gRPC. 
Updates software on the selected nodes of the @@ -471,7 +469,7 @@ def apply_software_update( self._stubs["apply_software_update"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/ApplySoftwareUpdate", request_serializer=cloud_memcache.ApplySoftwareUpdateRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + response_deserializer=operations.Operation.FromString, ) return self._stubs["apply_software_update"] diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py index e5a9f15..f2452b4 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,21 +14,23 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# + import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore from google.api_core import operations_v1 # type: ignore -from google.auth import credentials as ga_credentials # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.memcache_v1beta2.types import cloud_memcache -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 as operations # type: ignore + from .base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO from .grpc import CloudMemcacheGrpcTransport @@ -70,7 +73,7 @@ class CloudMemcacheGrpcAsyncIOTransport(CloudMemcacheTransport): def create_channel( cls, host: str = "memcache.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -97,15 +100,13 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. 
""" - - self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) - + scopes = scopes or cls.AUTH_SCOPES return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, + scopes=scopes, quota_project_id=quota_project_id, - **self_signed_jwt_kwargs, **kwargs, ) @@ -113,7 +114,7 @@ def __init__( self, *, host: str = "memcache.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -127,8 +128,7 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): - The hostname to connect to. + host (Optional[str]): The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -187,6 +187,7 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None + else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -322,7 +323,7 @@ def get_instance( def create_instance( self, ) -> Callable[ - [cloud_memcache.CreateInstanceRequest], Awaitable[operations_pb2.Operation] + [cloud_memcache.CreateInstanceRequest], Awaitable[operations.Operation] ]: r"""Return a callable for the create instance method over gRPC. 
@@ -342,7 +343,7 @@ def create_instance( self._stubs["create_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/CreateInstance", request_serializer=cloud_memcache.CreateInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + response_deserializer=operations.Operation.FromString, ) return self._stubs["create_instance"] @@ -350,7 +351,7 @@ def create_instance( def update_instance( self, ) -> Callable[ - [cloud_memcache.UpdateInstanceRequest], Awaitable[operations_pb2.Operation] + [cloud_memcache.UpdateInstanceRequest], Awaitable[operations.Operation] ]: r"""Return a callable for the update instance method over gRPC. @@ -371,7 +372,7 @@ def update_instance( self._stubs["update_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/UpdateInstance", request_serializer=cloud_memcache.UpdateInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + response_deserializer=operations.Operation.FromString, ) return self._stubs["update_instance"] @@ -379,7 +380,7 @@ def update_instance( def update_parameters( self, ) -> Callable[ - [cloud_memcache.UpdateParametersRequest], Awaitable[operations_pb2.Operation] + [cloud_memcache.UpdateParametersRequest], Awaitable[operations.Operation] ]: r"""Return a callable for the update parameters method over gRPC. 
@@ -402,7 +403,7 @@ def update_parameters( self._stubs["update_parameters"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/UpdateParameters", request_serializer=cloud_memcache.UpdateParametersRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + response_deserializer=operations.Operation.FromString, ) return self._stubs["update_parameters"] @@ -410,7 +411,7 @@ def update_parameters( def delete_instance( self, ) -> Callable[ - [cloud_memcache.DeleteInstanceRequest], Awaitable[operations_pb2.Operation] + [cloud_memcache.DeleteInstanceRequest], Awaitable[operations.Operation] ]: r"""Return a callable for the delete instance method over gRPC. @@ -430,7 +431,7 @@ def delete_instance( self._stubs["delete_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/DeleteInstance", request_serializer=cloud_memcache.DeleteInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + response_deserializer=operations.Operation.FromString, ) return self._stubs["delete_instance"] @@ -438,7 +439,7 @@ def delete_instance( def apply_parameters( self, ) -> Callable[ - [cloud_memcache.ApplyParametersRequest], Awaitable[operations_pb2.Operation] + [cloud_memcache.ApplyParametersRequest], Awaitable[operations.Operation] ]: r"""Return a callable for the apply parameters method over gRPC. 
@@ -460,7 +461,7 @@ def apply_parameters( self._stubs["apply_parameters"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/ApplyParameters", request_serializer=cloud_memcache.ApplyParametersRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + response_deserializer=operations.Operation.FromString, ) return self._stubs["apply_parameters"] @@ -468,7 +469,7 @@ def apply_parameters( def apply_software_update( self, ) -> Callable[ - [cloud_memcache.ApplySoftwareUpdateRequest], Awaitable[operations_pb2.Operation] + [cloud_memcache.ApplySoftwareUpdateRequest], Awaitable[operations.Operation] ]: r"""Return a callable for the apply software update method over gRPC. @@ -489,7 +490,7 @@ def apply_software_update( self._stubs["apply_software_update"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/ApplySoftwareUpdate", request_serializer=cloud_memcache.ApplySoftwareUpdateRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + response_deserializer=operations.Operation.FromString, ) return self._stubs["apply_software_update"] diff --git a/google/cloud/memcache_v1beta2/types/__init__.py b/google/cloud/memcache_v1beta2/types/__init__.py index a4e788a..90cf3eb 100644 --- a/google/cloud/memcache_v1beta2/types/__init__.py +++ b/google/cloud/memcache_v1beta2/types/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,6 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# + from .cloud_memcache import ( ApplyParametersRequest, ApplySoftwareUpdateRequest, diff --git a/google/cloud/memcache_v1beta2/types/cloud_memcache.py b/google/cloud/memcache_v1beta2/types/cloud_memcache.py index 6fdd3b8..b7e9a43 100644 --- a/google/cloud/memcache_v1beta2/types/cloud_memcache.py +++ b/google/cloud/memcache_v1beta2/types/cloud_memcache.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,10 +14,12 @@ # See the License for the specific language governing permissions and # limitations under the License. # + import proto # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore + +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore __protobuf__ = proto.module( @@ -49,6 +52,7 @@ class MemcacheVersion(proto.Enum): class Instance(proto.Message): r"""A Memorystore for Memcached instance + Attributes: name (str): Required. Unique name of the resource in this scope @@ -138,6 +142,7 @@ class State(proto.Enum): class NodeConfig(proto.Message): r"""Configuration for a Memcached Node. + Attributes: cpu_count (int): Required. Number of cpus per Memcached node. @@ -146,11 +151,13 @@ class NodeConfig(proto.Message): Memcached node. """ - cpu_count = proto.Field(proto.INT32, number=1,) - memory_size_mb = proto.Field(proto.INT32, number=2,) + cpu_count = proto.Field(proto.INT32, number=1) + + memory_size_mb = proto.Field(proto.INT32, number=2) class Node(proto.Message): r""" + Attributes: node_id (str): Output only. 
Identifier of the Memcached @@ -185,16 +192,23 @@ class State(proto.Enum): DELETING = 3 UPDATING = 4 - node_id = proto.Field(proto.STRING, number=1,) - zone = proto.Field(proto.STRING, number=2,) + node_id = proto.Field(proto.STRING, number=1) + + zone = proto.Field(proto.STRING, number=2) + state = proto.Field(proto.ENUM, number=3, enum="Instance.Node.State",) - host = proto.Field(proto.STRING, number=4,) - port = proto.Field(proto.INT32, number=5,) + + host = proto.Field(proto.STRING, number=4) + + port = proto.Field(proto.INT32, number=5) + parameters = proto.Field(proto.MESSAGE, number=6, message="MemcacheParameters",) - update_available = proto.Field(proto.BOOL, number=7,) + + update_available = proto.Field(proto.BOOL, number=7) class InstanceMessage(proto.Message): r""" + Attributes: code (google.cloud.memcache_v1beta2.types.Instance.InstanceMessage.Code): A code that correspond to one type of user- @@ -210,31 +224,44 @@ class Code(proto.Enum): ZONE_DISTRIBUTION_UNBALANCED = 1 code = proto.Field(proto.ENUM, number=1, enum="Instance.InstanceMessage.Code",) - message = proto.Field(proto.STRING, number=2,) - - name = proto.Field(proto.STRING, number=1,) - display_name = proto.Field(proto.STRING, number=2,) - labels = proto.MapField(proto.STRING, proto.STRING, number=3,) - authorized_network = proto.Field(proto.STRING, number=4,) - zones = proto.RepeatedField(proto.STRING, number=5,) - node_count = proto.Field(proto.INT32, number=6,) + + message = proto.Field(proto.STRING, number=2) + + name = proto.Field(proto.STRING, number=1) + + display_name = proto.Field(proto.STRING, number=2) + + labels = proto.MapField(proto.STRING, proto.STRING, number=3) + + authorized_network = proto.Field(proto.STRING, number=4) + + zones = proto.RepeatedField(proto.STRING, number=5) + + node_count = proto.Field(proto.INT32, number=6) + node_config = proto.Field(proto.MESSAGE, number=7, message=NodeConfig,) + memcache_version = proto.Field(proto.ENUM, number=9, 
enum="MemcacheVersion",) + parameters = proto.Field(proto.MESSAGE, number=11, message="MemcacheParameters",) + memcache_nodes = proto.RepeatedField(proto.MESSAGE, number=12, message=Node,) - create_time = proto.Field( - proto.MESSAGE, number=13, message=timestamp_pb2.Timestamp, - ) - update_time = proto.Field( - proto.MESSAGE, number=14, message=timestamp_pb2.Timestamp, - ) + + create_time = proto.Field(proto.MESSAGE, number=13, message=timestamp.Timestamp,) + + update_time = proto.Field(proto.MESSAGE, number=14, message=timestamp.Timestamp,) + state = proto.Field(proto.ENUM, number=15, enum=State,) - memcache_full_version = proto.Field(proto.STRING, number=18,) + + memcache_full_version = proto.Field(proto.STRING, number=18) + instance_messages = proto.RepeatedField( proto.MESSAGE, number=19, message=InstanceMessage, ) - discovery_endpoint = proto.Field(proto.STRING, number=20,) - update_available = proto.Field(proto.BOOL, number=21,) + + discovery_endpoint = proto.Field(proto.STRING, number=20) + + update_available = proto.Field(proto.BOOL, number=21) class ListInstancesRequest(proto.Message): @@ -267,11 +294,15 @@ class ListInstancesRequest(proto.Message): "name desc" or "" (unsorted). 
""" - parent = proto.Field(proto.STRING, number=1,) - page_size = proto.Field(proto.INT32, number=2,) - page_token = proto.Field(proto.STRING, number=3,) - filter = proto.Field(proto.STRING, number=4,) - order_by = proto.Field(proto.STRING, number=5,) + parent = proto.Field(proto.STRING, number=1) + + page_size = proto.Field(proto.INT32, number=2) + + page_token = proto.Field(proto.STRING, number=3) + + filter = proto.Field(proto.STRING, number=4) + + order_by = proto.Field(proto.STRING, number=5) class ListInstancesResponse(proto.Message): @@ -299,8 +330,10 @@ def raw_page(self): return self resources = proto.RepeatedField(proto.MESSAGE, number=1, message="Instance",) - next_page_token = proto.Field(proto.STRING, number=2,) - unreachable = proto.RepeatedField(proto.STRING, number=3,) + + next_page_token = proto.Field(proto.STRING, number=2) + + unreachable = proto.RepeatedField(proto.STRING, number=3) class GetInstanceRequest(proto.Message): @@ -314,7 +347,7 @@ class GetInstanceRequest(proto.Message): where ``location_id`` refers to a GCP region """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field(proto.STRING, number=1) class CreateInstanceRequest(proto.Message): @@ -343,8 +376,10 @@ class CreateInstanceRequest(proto.Message): Required. A Memcached [Instance] resource """ - parent = proto.Field(proto.STRING, number=1,) - instance_id = proto.Field(proto.STRING, number=2,) + parent = proto.Field(proto.STRING, number=1) + + instance_id = proto.Field(proto.STRING, number=2) + resource = proto.Field(proto.MESSAGE, number=3, message="Instance",) @@ -362,9 +397,8 @@ class UpdateInstanceRequest(proto.Message): specified in update_mask are updated. 
""" - update_mask = proto.Field( - proto.MESSAGE, number=1, message=field_mask_pb2.FieldMask, - ) + update_mask = proto.Field(proto.MESSAGE, number=1, message=field_mask.FieldMask,) + resource = proto.Field(proto.MESSAGE, number=2, message="Instance",) @@ -379,7 +413,7 @@ class DeleteInstanceRequest(proto.Message): where ``location_id`` refers to a GCP region """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field(proto.STRING, number=1) class ApplyParametersRequest(proto.Message): @@ -401,9 +435,11 @@ class ApplyParametersRequest(proto.Message): within the instance. """ - name = proto.Field(proto.STRING, number=1,) - node_ids = proto.RepeatedField(proto.STRING, number=2,) - apply_all = proto.Field(proto.BOOL, number=3,) + name = proto.Field(proto.STRING, number=1) + + node_ids = proto.RepeatedField(proto.STRING, number=2) + + apply_all = proto.Field(proto.BOOL, number=3) class UpdateParametersRequest(proto.Message): @@ -421,10 +457,10 @@ class UpdateParametersRequest(proto.Message): The parameters to apply to the instance. """ - name = proto.Field(proto.STRING, number=1,) - update_mask = proto.Field( - proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, - ) + name = proto.Field(proto.STRING, number=1) + + update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + parameters = proto.Field(proto.MESSAGE, number=3, message="MemcacheParameters",) @@ -449,9 +485,11 @@ class ApplySoftwareUpdateRequest(proto.Message): instance. """ - instance = proto.Field(proto.STRING, number=1,) - node_ids = proto.RepeatedField(proto.STRING, number=2,) - apply_all = proto.Field(proto.BOOL, number=3,) + instance = proto.Field(proto.STRING, number=1) + + node_ids = proto.RepeatedField(proto.STRING, number=2) + + apply_all = proto.Field(proto.BOOL, number=3) class MemcacheParameters(proto.Message): @@ -469,12 +507,14 @@ class MemcacheParameters(proto.Message): memcached process. 
""" - id = proto.Field(proto.STRING, number=1,) - params = proto.MapField(proto.STRING, proto.STRING, number=3,) + id = proto.Field(proto.STRING, number=1) + + params = proto.MapField(proto.STRING, proto.STRING, number=3) class OperationMetadata(proto.Message): r"""Represents the metadata of a long-running operation. + Attributes: create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Time when the operation was @@ -502,13 +542,19 @@ class OperationMetadata(proto.Message): operation. """ - create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) - target = proto.Field(proto.STRING, number=3,) - verb = proto.Field(proto.STRING, number=4,) - status_detail = proto.Field(proto.STRING, number=5,) - cancel_requested = proto.Field(proto.BOOL, number=6,) - api_version = proto.Field(proto.STRING, number=7,) + create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) + + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) + + target = proto.Field(proto.STRING, number=3) + + verb = proto.Field(proto.STRING, number=4) + + status_detail = proto.Field(proto.STRING, number=5) + + cancel_requested = proto.Field(proto.BOOL, number=6) + + api_version = proto.Field(proto.STRING, number=7) class LocationMetadata(proto.Message): @@ -529,7 +575,7 @@ class LocationMetadata(proto.Message): class ZoneMetadata(proto.Message): - r""" """ + r"""""" __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/scripts/fixup_memcache_v1_keywords.py b/scripts/fixup_memcache_v1_keywords.py index 774b03f..eac442a 100644 --- a/scripts/fixup_memcache_v1_keywords.py +++ b/scripts/fixup_memcache_v1_keywords.py @@ -1,5 +1,6 @@ #! 
/usr/bin/env python3 # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,6 +15,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # + import argparse import os import libcst as cst @@ -39,13 +41,14 @@ def partition( class memcacheCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'apply_parameters': ('name', 'node_ids', 'apply_all', ), - 'create_instance': ('parent', 'instance_id', 'instance', ), - 'delete_instance': ('name', ), - 'get_instance': ('name', ), - 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'update_instance': ('update_mask', 'instance', ), - 'update_parameters': ('name', 'update_mask', 'parameters', ), + 'apply_parameters': ('name', 'node_ids', 'apply_all', ), + 'create_instance': ('parent', 'instance_id', 'instance', ), + 'delete_instance': ('name', ), + 'get_instance': ('name', ), + 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'update_instance': ('update_mask', 'instance', ), + 'update_parameters': ('name', 'update_mask', 'parameters', ), + } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -76,7 +79,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: value=cst.Dict([ cst.DictElement( cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) + cst.Element(value=arg.value) ) # Note: the args + kwargs looks silly, but keep in mind that # the control parameters had to be stripped out, and that diff --git a/scripts/fixup_memcache_v1beta2_keywords.py b/scripts/fixup_memcache_v1beta2_keywords.py index afe0f42..4267b6f 100644 --- a/scripts/fixup_memcache_v1beta2_keywords.py +++ b/scripts/fixup_memcache_v1beta2_keywords.py @@ -1,5 +1,6 @@ #! 
/usr/bin/env python3 # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,6 +15,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # + import argparse import os import libcst as cst @@ -39,14 +41,15 @@ def partition( class memcacheCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'apply_parameters': ('name', 'node_ids', 'apply_all', ), - 'apply_software_update': ('instance', 'node_ids', 'apply_all', ), - 'create_instance': ('parent', 'instance_id', 'resource', ), - 'delete_instance': ('name', ), - 'get_instance': ('name', ), - 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'update_instance': ('update_mask', 'resource', ), - 'update_parameters': ('name', 'update_mask', 'parameters', ), + 'apply_parameters': ('name', 'node_ids', 'apply_all', ), + 'apply_software_update': ('instance', 'node_ids', 'apply_all', ), + 'create_instance': ('parent', 'instance_id', 'resource', ), + 'delete_instance': ('name', ), + 'get_instance': ('name', ), + 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'update_instance': ('update_mask', 'resource', ), + 'update_parameters': ('name', 'update_mask', 'parameters', ), + } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -77,7 +80,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: value=cst.Dict([ cst.DictElement( cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) + cst.Element(value=arg.value) ) # Note: the args + kwargs looks silly, but keep in mind that # the control parameters had to be stripped out, and that diff --git a/setup.py b/setup.py index da77bb3..e9985ee 100644 --- a/setup.py +++ b/setup.py @@ -42,7 +42,6 @@ install_requires=( "google-api-core[grpc] >= 1.22.2, 
< 2.0.0dev", "proto-plus >= 1.4.0", - "packaging >= 14.3", ), python_requires=">=3.6", classifiers=[ diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index 71b8e10..a929701 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -7,5 +7,3 @@ # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.22.2 proto-plus==1.15.0 -packaging==14.3 -google-auth==1.24.0 # TODO: remove when google-auth >= 1.25.0 is transitively required through google-api-core diff --git a/tests/__init__.py b/tests/__init__.py deleted file mode 100644 index 4de6597..0000000 --- a/tests/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py deleted file mode 100644 index 4de6597..0000000 --- a/tests/unit/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py deleted file mode 100644 index 4de6597..0000000 --- a/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/tests/unit/gapic/memcache_v1/__init__.py b/tests/unit/gapic/memcache_v1/__init__.py index 4de6597..42ffdf2 100644 --- a/tests/unit/gapic/memcache_v1/__init__.py +++ b/tests/unit/gapic/memcache_v1/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py index 4970a18..b4793ec 100644 --- a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,9 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# + import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -23,56 +24,26 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule - +from google import auth from google.api_core import client_options -from google.api_core import exceptions as core_exceptions +from google.api_core import exceptions from google.api_core import future from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials +from google.auth import credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.memcache_v1.services.cloud_memcache import CloudMemcacheAsyncClient from google.cloud.memcache_v1.services.cloud_memcache import CloudMemcacheClient from google.cloud.memcache_v1.services.cloud_memcache import pagers from google.cloud.memcache_v1.services.cloud_memcache import transports -from google.cloud.memcache_v1.services.cloud_memcache.transports.base import ( - _API_CORE_VERSION, -) -from google.cloud.memcache_v1.services.cloud_memcache.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.memcache_v1.types import cloud_memcache from google.longrunning import operations_pb2 from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - -# TODO(busunkim): Once google-api-core >= 1.26.0 is required: -# - Delete all the api-core and auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - -requires_api_core_lt_1_26_0 = pytest.mark.skipif( - packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), - reason="This test requires google-api-core < 1.26.0", -) - -requires_api_core_gte_1_26_0 = pytest.mark.skipif( - packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), - reason="This test requires google-api-core >= 1.26.0", -) +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore def client_cert_source_callback(): @@ -123,7 +94,7 @@ def test__get_default_mtls_endpoint(): "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] ) def test_cloud_memcache_client_from_service_account_info(client_class): - creds = ga_credentials.AnonymousCredentials() + creds = credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -140,7 +111,7 @@ def test_cloud_memcache_client_from_service_account_info(client_class): "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] ) def test_cloud_memcache_client_from_service_account_file(client_class): - creds = ga_credentials.AnonymousCredentials() + creds = credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -193,7 +164,7 @@ def test_cloud_memcache_client_client_options( ): # Check that if channel is provided we won't create a new one. 
with mock.patch.object(CloudMemcacheClient, "get_transport_class") as gtc: - transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + transport = transport_class(credentials=credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -481,7 +452,7 @@ def test_list_instances( transport: str = "grpc", request_type=cloud_memcache.ListInstancesRequest ): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -494,16 +465,21 @@ def test_list_instances( call.return_value = cloud_memcache.ListInstancesResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) + response = client.list_instances(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.ListInstancesRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @@ -515,7 +491,7 @@ def test_list_instances_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -523,6 +499,7 @@ def test_list_instances_empty_call(): client.list_instances() call.assert_called() _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.ListInstancesRequest() @@ -531,7 +508,7 @@ async def test_list_instances_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.ListInstancesRequest ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -547,16 +524,20 @@ async def test_list_instances_async( unreachable=["unreachable_value"], ) ) + response = await client.list_instances(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.ListInstancesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListInstancesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @@ -566,17 +547,17 @@ async def test_list_instances_async_from_dict(): def test_list_instances_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ListInstancesRequest() - request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: call.return_value = cloud_memcache.ListInstancesResponse() + client.list_instances(request) # Establish that the underlying gRPC stub method was called. 
@@ -591,14 +572,11 @@ def test_list_instances_field_headers(): @pytest.mark.asyncio async def test_list_instances_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ListInstancesRequest() - request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -606,6 +584,7 @@ async def test_list_instances_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_memcache.ListInstancesResponse() ) + await client.list_instances(request) # Establish that the underlying gRPC stub method was called. @@ -619,12 +598,13 @@ async def test_list_instances_field_headers_async(): def test_list_instances_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_memcache.ListInstancesResponse() + # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_instances(parent="parent_value",) @@ -633,11 +613,12 @@ def test_list_instances_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" def test_list_instances_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -649,9 +630,7 @@ def test_list_instances_flattened_error(): @pytest.mark.asyncio async def test_list_instances_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -669,14 +648,13 @@ async def test_list_instances_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_instances_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -687,7 +665,7 @@ async def test_list_instances_flattened_error_async(): def test_list_instances_pager(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials,) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -725,7 +703,7 @@ def test_list_instances_pager(): def test_list_instances_pages(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials,) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -755,7 +733,7 @@ def test_list_instances_pages(): @pytest.mark.asyncio async def test_list_instances_async_pager(): - client = CloudMemcacheAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -792,7 +770,7 @@ async def test_list_instances_async_pager(): @pytest.mark.asyncio async def test_list_instances_async_pages(): - client = CloudMemcacheAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -828,7 +806,7 @@ def test_get_instance( transport: str = "grpc", request_type=cloud_memcache.GetInstanceRequest ): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -849,23 +827,35 @@ def test_get_instance( memcache_full_version="memcache_full_version_value", discovery_endpoint="discovery_endpoint_value", ) + response = client.get_instance(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.GetInstanceRequest() # Establish that the response is the type that we expect. + assert isinstance(response, cloud_memcache.Instance) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.authorized_network == "authorized_network_value" + assert response.zones == ["zones_value"] + assert response.node_count == 1070 + assert response.memcache_version == cloud_memcache.MemcacheVersion.MEMCACHE_1_5 + assert response.state == cloud_memcache.Instance.State.CREATING + assert response.memcache_full_version == "memcache_full_version_value" + assert response.discovery_endpoint == "discovery_endpoint_value" @@ -877,7 +867,7 @@ def test_get_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -885,6 +875,7 @@ def test_get_instance_empty_call(): client.get_instance() call.assert_called() _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.GetInstanceRequest() @@ -893,7 +884,7 @@ async def test_get_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.GetInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -916,23 +907,34 @@ async def test_get_instance_async( discovery_endpoint="discovery_endpoint_value", ) ) + response = await client.get_instance(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.GetInstanceRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloud_memcache.Instance) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.authorized_network == "authorized_network_value" + assert response.zones == ["zones_value"] + assert response.node_count == 1070 + assert response.memcache_version == cloud_memcache.MemcacheVersion.MEMCACHE_1_5 + assert response.state == cloud_memcache.Instance.State.CREATING + assert response.memcache_full_version == "memcache_full_version_value" + assert response.discovery_endpoint == "discovery_endpoint_value" @@ -942,17 +944,17 @@ async def test_get_instance_async_from_dict(): def test_get_instance_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.GetInstanceRequest() - request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_instance), "__call__") as call: call.return_value = cloud_memcache.Instance() + client.get_instance(request) # Establish that the underlying gRPC stub method was called. @@ -967,14 +969,11 @@ def test_get_instance_field_headers(): @pytest.mark.asyncio async def test_get_instance_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = cloud_memcache.GetInstanceRequest() - request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -982,6 +981,7 @@ async def test_get_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_memcache.Instance() ) + await client.get_instance(request) # Establish that the underlying gRPC stub method was called. @@ -995,12 +995,13 @@ async def test_get_instance_field_headers_async(): def test_get_instance_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_memcache.Instance() + # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_instance(name="name_value",) @@ -1009,11 +1010,12 @@ def test_get_instance_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" def test_get_instance_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1025,9 +1027,7 @@ def test_get_instance_flattened_error(): @pytest.mark.asyncio async def test_get_instance_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_instance), "__call__") as call: @@ -1045,14 +1045,13 @@ async def test_get_instance_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1066,7 +1065,7 @@ def test_create_instance( transport: str = "grpc", request_type=cloud_memcache.CreateInstanceRequest ): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1077,11 +1076,13 @@ def test_create_instance( with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.CreateInstanceRequest() # Establish that the response is the type that we expect. @@ -1096,7 +1097,7 @@ def test_create_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1104,6 +1105,7 @@ def test_create_instance_empty_call(): client.create_instance() call.assert_called() _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.CreateInstanceRequest() @@ -1112,7 +1114,7 @@ async def test_create_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.CreateInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1125,11 +1127,13 @@ async def test_create_instance_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) + response = await client.create_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.CreateInstanceRequest() # Establish that the response is the type that we expect. @@ -1142,17 +1146,17 @@ async def test_create_instance_async_from_dict(): def test_create_instance_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.CreateInstanceRequest() - request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") + client.create_instance(request) # Establish that the underlying gRPC stub method was called. 
@@ -1167,14 +1171,11 @@ def test_create_instance_field_headers(): @pytest.mark.asyncio async def test_create_instance_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.CreateInstanceRequest() - request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1182,6 +1183,7 @@ async def test_create_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) + await client.create_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1195,12 +1197,13 @@ async def test_create_instance_field_headers_async(): def test_create_instance_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_instance( @@ -1213,13 +1216,16 @@ def test_create_instance_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + assert args[0].instance == cloud_memcache.Instance(name="name_value") + assert args[0].instance_id == "instance_id_value" def test_create_instance_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1234,9 +1240,7 @@ def test_create_instance_flattened_error(): @pytest.mark.asyncio async def test_create_instance_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_instance), "__call__") as call: @@ -1258,16 +1262,17 @@ async def test_create_instance_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + assert args[0].instance == cloud_memcache.Instance(name="name_value") + assert args[0].instance_id == "instance_id_value" @pytest.mark.asyncio async def test_create_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1284,7 +1289,7 @@ def test_update_instance( transport: str = "grpc", request_type=cloud_memcache.UpdateInstanceRequest ): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1295,11 +1300,13 @@ def test_update_instance( with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.UpdateInstanceRequest() # Establish that the response is the type that we expect. @@ -1314,7 +1321,7 @@ def test_update_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1322,6 +1329,7 @@ def test_update_instance_empty_call(): client.update_instance() call.assert_called() _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.UpdateInstanceRequest() @@ -1330,7 +1338,7 @@ async def test_update_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1343,11 +1351,13 @@ async def test_update_instance_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) + response = await client.update_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.UpdateInstanceRequest() # Establish that the response is the type that we expect. @@ -1360,17 +1370,17 @@ async def test_update_instance_async_from_dict(): def test_update_instance_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateInstanceRequest() - request.instance.name = "instance.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") + client.update_instance(request) # Establish that the underlying gRPC stub method was called. 
@@ -1387,14 +1397,11 @@ def test_update_instance_field_headers(): @pytest.mark.asyncio async def test_update_instance_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateInstanceRequest() - request.instance.name = "instance.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1402,6 +1409,7 @@ async def test_update_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) + await client.update_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1417,29 +1425,32 @@ async def test_update_instance_field_headers_async(): def test_update_instance_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_instance( instance=cloud_memcache.Instance(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + update_mask=field_mask.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + assert args[0].instance == cloud_memcache.Instance(name="name_value") - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) def test_update_instance_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1447,15 +1458,13 @@ def test_update_instance_flattened_error(): client.update_instance( cloud_memcache.UpdateInstanceRequest(), instance=cloud_memcache.Instance(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + update_mask=field_mask.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_instance_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_instance), "__call__") as call: @@ -1469,22 +1478,22 @@ async def test_update_instance_flattened_async(): # using the keyword arguments to the method. response = await client.update_instance( instance=cloud_memcache.Instance(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + update_mask=field_mask.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + assert args[0].instance == cloud_memcache.Instance(name="name_value") - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1492,7 +1501,7 @@ async def test_update_instance_flattened_error_async(): await client.update_instance( cloud_memcache.UpdateInstanceRequest(), instance=cloud_memcache.Instance(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + update_mask=field_mask.FieldMask(paths=["paths_value"]), ) @@ -1500,7 +1509,7 @@ def test_update_parameters( transport: str = "grpc", request_type=cloud_memcache.UpdateParametersRequest ): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1513,11 +1522,13 @@ def test_update_parameters( ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_parameters(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.UpdateParametersRequest() # Establish that the response is the type that we expect. @@ -1532,7 +1543,7 @@ def test_update_parameters_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1542,6 +1553,7 @@ def test_update_parameters_empty_call(): client.update_parameters() call.assert_called() _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.UpdateParametersRequest() @@ -1550,7 +1562,7 @@ async def test_update_parameters_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateParametersRequest ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1565,11 +1577,13 @@ async def test_update_parameters_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) + response = await client.update_parameters(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.UpdateParametersRequest() # Establish that the response is the type that we expect. @@ -1582,12 +1596,11 @@ async def test_update_parameters_async_from_dict(): def test_update_parameters_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateParametersRequest() - request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1595,6 +1608,7 @@ def test_update_parameters_field_headers(): type(client.transport.update_parameters), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") + client.update_parameters(request) # Establish that the underlying gRPC stub method was called. @@ -1609,14 +1623,11 @@ def test_update_parameters_field_headers(): @pytest.mark.asyncio async def test_update_parameters_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateParametersRequest() - request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1626,6 +1637,7 @@ async def test_update_parameters_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) + await client.update_parameters(request) # Establish that the underlying gRPC stub method was called. @@ -1639,7 +1651,7 @@ async def test_update_parameters_field_headers_async(): def test_update_parameters_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1647,11 +1659,12 @@ def test_update_parameters_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.update_parameters( name="name_value", - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + update_mask=field_mask.FieldMask(paths=["paths_value"]), parameters=cloud_memcache.MemcacheParameters(id="id_value"), ) @@ -1659,13 +1672,16 @@ def test_update_parameters_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].parameters == cloud_memcache.MemcacheParameters(id="id_value") def test_update_parameters_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1673,16 +1689,14 @@ def test_update_parameters_flattened_error(): client.update_parameters( cloud_memcache.UpdateParametersRequest(), name="name_value", - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + update_mask=field_mask.FieldMask(paths=["paths_value"]), parameters=cloud_memcache.MemcacheParameters(id="id_value"), ) @pytest.mark.asyncio async def test_update_parameters_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1698,7 +1712,7 @@ async def test_update_parameters_flattened_async(): # using the keyword arguments to the method. 
response = await client.update_parameters( name="name_value", - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + update_mask=field_mask.FieldMask(paths=["paths_value"]), parameters=cloud_memcache.MemcacheParameters(id="id_value"), ) @@ -1706,16 +1720,17 @@ async def test_update_parameters_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].parameters == cloud_memcache.MemcacheParameters(id="id_value") @pytest.mark.asyncio async def test_update_parameters_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1723,7 +1738,7 @@ async def test_update_parameters_flattened_error_async(): await client.update_parameters( cloud_memcache.UpdateParametersRequest(), name="name_value", - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + update_mask=field_mask.FieldMask(paths=["paths_value"]), parameters=cloud_memcache.MemcacheParameters(id="id_value"), ) @@ -1732,7 +1747,7 @@ def test_delete_instance( transport: str = "grpc", request_type=cloud_memcache.DeleteInstanceRequest ): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1743,11 +1758,13 @@ def test_delete_instance( with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.DeleteInstanceRequest() # Establish that the response is the type that we expect. @@ -1762,7 +1779,7 @@ def test_delete_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1770,6 +1787,7 @@ def test_delete_instance_empty_call(): client.delete_instance() call.assert_called() _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.DeleteInstanceRequest() @@ -1778,7 +1796,7 @@ async def test_delete_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.DeleteInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1791,11 +1809,13 @@ async def test_delete_instance_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) + response = await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.DeleteInstanceRequest() # Establish that the response is the type that we expect. 
@@ -1808,17 +1828,17 @@ async def test_delete_instance_async_from_dict(): def test_delete_instance_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.DeleteInstanceRequest() - request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1833,14 +1853,11 @@ def test_delete_instance_field_headers(): @pytest.mark.asyncio async def test_delete_instance_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.DeleteInstanceRequest() - request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1848,6 +1865,7 @@ async def test_delete_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) + await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1861,12 +1879,13 @@ async def test_delete_instance_field_headers_async(): def test_delete_instance_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_instance(name="name_value",) @@ -1875,11 +1894,12 @@ def test_delete_instance_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" def test_delete_instance_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1891,9 +1911,7 @@ def test_delete_instance_flattened_error(): @pytest.mark.asyncio async def test_delete_instance_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: @@ -1911,14 +1929,13 @@ async def test_delete_instance_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1932,7 +1949,7 @@ def test_apply_parameters( transport: str = "grpc", request_type=cloud_memcache.ApplyParametersRequest ): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1943,11 +1960,13 @@ def test_apply_parameters( with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.apply_parameters(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.ApplyParametersRequest() # Establish that the response is the type that we expect. @@ -1962,7 +1981,7 @@ def test_apply_parameters_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1970,6 +1989,7 @@ def test_apply_parameters_empty_call(): client.apply_parameters() call.assert_called() _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.ApplyParametersRequest() @@ -1978,7 +1998,7 @@ async def test_apply_parameters_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.ApplyParametersRequest ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1991,11 +2011,13 @@ async def test_apply_parameters_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) + response = await client.apply_parameters(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.ApplyParametersRequest() # Establish that the response is the type that we expect. @@ -2008,17 +2030,17 @@ async def test_apply_parameters_async_from_dict(): def test_apply_parameters_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ApplyParametersRequest() - request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") + client.apply_parameters(request) # Establish that the underlying gRPC stub method was called. 
@@ -2033,14 +2055,11 @@ def test_apply_parameters_field_headers(): @pytest.mark.asyncio async def test_apply_parameters_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ApplyParametersRequest() - request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2048,6 +2067,7 @@ async def test_apply_parameters_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) + await client.apply_parameters(request) # Establish that the underlying gRPC stub method was called. @@ -2061,12 +2081,13 @@ async def test_apply_parameters_field_headers_async(): def test_apply_parameters_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.apply_parameters( @@ -2077,13 +2098,16 @@ def test_apply_parameters_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + assert args[0].node_ids == ["node_ids_value"] + assert args[0].apply_all == True def test_apply_parameters_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2098,9 +2122,7 @@ def test_apply_parameters_flattened_error(): @pytest.mark.asyncio async def test_apply_parameters_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: @@ -2120,16 +2142,17 @@ async def test_apply_parameters_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + assert args[0].node_ids == ["node_ids_value"] + assert args[0].apply_all == True @pytest.mark.asyncio async def test_apply_parameters_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2145,16 +2168,16 @@ async def test_apply_parameters_flattened_error_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.CloudMemcacheGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.CloudMemcacheGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = CloudMemcacheClient( @@ -2164,7 +2187,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.CloudMemcacheGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = CloudMemcacheClient( @@ -2175,7 +2198,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.CloudMemcacheGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=credentials.AnonymousCredentials(), ) client = CloudMemcacheClient(transport=transport) assert client.transport is transport @@ -2184,13 +2207,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.CloudMemcacheGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.CloudMemcacheGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -2205,23 +2228,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.CloudMemcacheGrpcTransport,) def test_cloud_memcache_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): + with pytest.raises(exceptions.DuplicateCredentialArgs): transport = transports.CloudMemcacheTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -2233,7 +2256,7 @@ def test_cloud_memcache_base_transport(): ) as Transport: Transport.return_value = None transport = transports.CloudMemcacheTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -2257,37 +2280,15 @@ def test_cloud_memcache_base_transport(): 
transport.operations_client -@requires_google_auth_gte_1_25_0 def test_cloud_memcache_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.memcache_v1.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudMemcacheTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -@requires_google_auth_lt_1_25_0 -def test_cloud_memcache_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True + auth, "load_credentials_from_file" ) as load_creds, mock.patch( "google.cloud.memcache_v1.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + load_creds.return_value = (credentials.AnonymousCredentials(), None) transport = transports.CloudMemcacheTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -2300,33 +2301,19 @@ def test_cloud_memcache_base_transport_with_credentials_file_old_google_auth(): def test_cloud_memcache_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + with mock.patch.object(auth, "default") as adc, mock.patch( "google.cloud.memcache_v1.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (credentials.AnonymousCredentials(), None) transport = transports.CloudMemcacheTransport() adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_cloud_memcache_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - CloudMemcacheClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) - - -@requires_google_auth_lt_1_25_0 -def test_cloud_memcache_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) CloudMemcacheClient() adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), @@ -2334,156 +2321,20 @@ def test_cloud_memcache_auth_adc_old_google_auth(): ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudMemcacheGrpcTransport, - transports.CloudMemcacheGrpcAsyncIOTransport, - ], -) -@requires_google_auth_gte_1_25_0 -def test_cloud_memcache_transport_auth_adc(transport_class): +def test_cloud_memcache_transport_auth_adc(): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.CloudMemcacheGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudMemcacheGrpcTransport, - transports.CloudMemcacheGrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_cloud_memcache_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.CloudMemcacheGrpcTransport, grpc_helpers), - (transports.CloudMemcacheGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -@requires_api_core_gte_1_26_0 -def test_cloud_memcache_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "memcache.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=["1", "2"], - default_host="memcache.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.CloudMemcacheGrpcTransport, grpc_helpers), - (transports.CloudMemcacheGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -@requires_api_core_lt_1_26_0 -def test_cloud_memcache_transport_create_channel_old_api_core( - transport_class, grpc_helpers -): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus") - - create_channel.assert_called_with( - "memcache.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.CloudMemcacheGrpcTransport, grpc_helpers), - (transports.CloudMemcacheGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -@requires_api_core_lt_1_26_0 -def test_cloud_memcache_transport_create_channel_user_scopes( - transport_class, grpc_helpers -): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "memcache.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - scopes=["1", "2"], - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "transport_class", [ @@ -2492,7 +2343,7 @@ def test_cloud_memcache_transport_create_channel_user_scopes( ], ) def test_cloud_memcache_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = ga_credentials.AnonymousCredentials() + cred = credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -2531,7 +2382,7 @@ def test_cloud_memcache_grpc_transport_client_cert_source_for_mtls(transport_cla def test_cloud_memcache_host_no_port(): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="memcache.googleapis.com" ), @@ -2541,7 +2392,7 @@ def test_cloud_memcache_host_no_port(): def test_cloud_memcache_host_with_port(): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="memcache.googleapis.com:8000" ), @@ -2595,9 +2446,9 @@ def test_cloud_memcache_transport_channel_mtls_with_client_cert_source(transport mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = ga_credentials.AnonymousCredentials() + cred 
= credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: + with mock.patch.object(auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -2673,7 +2524,7 @@ def test_cloud_memcache_transport_channel_mtls_with_adc(transport_class): def test_cloud_memcache_grpc_lro_client(): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=credentials.AnonymousCredentials(), transport="grpc", ) transport = client.transport @@ -2686,7 +2537,7 @@ def test_cloud_memcache_grpc_lro_client(): def test_cloud_memcache_grpc_lro_async_client(): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client.transport @@ -2701,6 +2552,7 @@ def test_instance_path(): project = "squid" location = "clam" instance = "whelk" + expected = "projects/{project}/locations/{location}/instances/{instance}".format( project=project, location=location, instance=instance, ) @@ -2723,6 +2575,7 @@ def test_parse_instance_path(): def test_common_billing_account_path(): billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -2743,6 +2596,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "winkle" + expected = "folders/{folder}".format(folder=folder,) actual = CloudMemcacheClient.common_folder_path(folder) assert expected == actual @@ -2761,6 +2615,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "scallop" + expected = "organizations/{organization}".format(organization=organization,) actual = CloudMemcacheClient.common_organization_path(organization) assert expected == actual @@ -2779,6 +2634,7 @@ def 
test_parse_common_organization_path(): def test_common_project_path(): project = "squid" + expected = "projects/{project}".format(project=project,) actual = CloudMemcacheClient.common_project_path(project) assert expected == actual @@ -2798,6 +2654,7 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "whelk" location = "octopus" + expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -2824,7 +2681,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.CloudMemcacheTransport, "_prep_wrapped_messages" ) as prep: client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2833,6 +2690,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = CloudMemcacheClient.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/memcache_v1beta2/__init__.py b/tests/unit/gapic/memcache_v1beta2/__init__.py index 4de6597..42ffdf2 100644 --- a/tests/unit/gapic/memcache_v1beta2/__init__.py +++ b/tests/unit/gapic/memcache_v1beta2/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index b991eec..1e242ab 100644 --- a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ 
-13,9 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # + import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -23,16 +24,16 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule - +from google import auth from google.api_core import client_options -from google.api_core import exceptions as core_exceptions +from google.api_core import exceptions from google.api_core import future from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials +from google.auth import credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.memcache_v1beta2.services.cloud_memcache import ( CloudMemcacheAsyncClient, @@ -40,41 +41,11 @@ from google.cloud.memcache_v1beta2.services.cloud_memcache import CloudMemcacheClient from google.cloud.memcache_v1beta2.services.cloud_memcache import pagers from google.cloud.memcache_v1beta2.services.cloud_memcache import transports -from google.cloud.memcache_v1beta2.services.cloud_memcache.transports.base import ( - _API_CORE_VERSION, -) -from google.cloud.memcache_v1beta2.services.cloud_memcache.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.memcache_v1beta2.types import cloud_memcache from google.longrunning import operations_pb2 from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - -# TODO(busunkim): Once google-api-core >= 1.26.0 is required: -# - Delete all the api-core and auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - -requires_api_core_lt_1_26_0 = pytest.mark.skipif( - packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), - reason="This test requires google-api-core < 1.26.0", -) - -requires_api_core_gte_1_26_0 = pytest.mark.skipif( - packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), - reason="This test requires google-api-core >= 1.26.0", -) +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore def client_cert_source_callback(): @@ -125,7 +96,7 @@ def test__get_default_mtls_endpoint(): "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] ) def test_cloud_memcache_client_from_service_account_info(client_class): - creds = ga_credentials.AnonymousCredentials() + creds = credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -142,7 +113,7 @@ def test_cloud_memcache_client_from_service_account_info(client_class): "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] ) def test_cloud_memcache_client_from_service_account_file(client_class): - creds = ga_credentials.AnonymousCredentials() + creds = credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -195,7 +166,7 @@ def test_cloud_memcache_client_client_options( ): # Check that if channel is provided we won't create a new one. 
with mock.patch.object(CloudMemcacheClient, "get_transport_class") as gtc: - transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + transport = transport_class(credentials=credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -483,7 +454,7 @@ def test_list_instances( transport: str = "grpc", request_type=cloud_memcache.ListInstancesRequest ): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -496,16 +467,21 @@ def test_list_instances( call.return_value = cloud_memcache.ListInstancesResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) + response = client.list_instances(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.ListInstancesRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @@ -517,7 +493,7 @@ def test_list_instances_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -525,6 +501,7 @@ def test_list_instances_empty_call(): client.list_instances() call.assert_called() _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.ListInstancesRequest() @@ -533,7 +510,7 @@ async def test_list_instances_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.ListInstancesRequest ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -549,16 +526,20 @@ async def test_list_instances_async( unreachable=["unreachable_value"], ) ) + response = await client.list_instances(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.ListInstancesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListInstancesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @@ -568,17 +549,17 @@ async def test_list_instances_async_from_dict(): def test_list_instances_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ListInstancesRequest() - request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: call.return_value = cloud_memcache.ListInstancesResponse() + client.list_instances(request) # Establish that the underlying gRPC stub method was called. 
@@ -593,14 +574,11 @@ def test_list_instances_field_headers(): @pytest.mark.asyncio async def test_list_instances_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ListInstancesRequest() - request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -608,6 +586,7 @@ async def test_list_instances_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_memcache.ListInstancesResponse() ) + await client.list_instances(request) # Establish that the underlying gRPC stub method was called. @@ -621,12 +600,13 @@ async def test_list_instances_field_headers_async(): def test_list_instances_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_memcache.ListInstancesResponse() + # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_instances(parent="parent_value",) @@ -635,11 +615,12 @@ def test_list_instances_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" def test_list_instances_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -651,9 +632,7 @@ def test_list_instances_flattened_error(): @pytest.mark.asyncio async def test_list_instances_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -671,14 +650,13 @@ async def test_list_instances_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_instances_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -689,7 +667,7 @@ async def test_list_instances_flattened_error_async(): def test_list_instances_pager(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials,) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -727,7 +705,7 @@ def test_list_instances_pager(): def test_list_instances_pages(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials,) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -757,7 +735,7 @@ def test_list_instances_pages(): @pytest.mark.asyncio async def test_list_instances_async_pager(): - client = CloudMemcacheAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -794,7 +772,7 @@ async def test_list_instances_async_pager(): @pytest.mark.asyncio async def test_list_instances_async_pages(): - client = CloudMemcacheAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -830,7 +808,7 @@ def test_get_instance( transport: str = "grpc", request_type=cloud_memcache.GetInstanceRequest ): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -852,24 +830,37 @@ def test_get_instance( discovery_endpoint="discovery_endpoint_value", update_available=True, ) + response = client.get_instance(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.GetInstanceRequest() # Establish that the response is the type that we expect. + assert isinstance(response, cloud_memcache.Instance) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.authorized_network == "authorized_network_value" + assert response.zones == ["zones_value"] + assert response.node_count == 1070 + assert response.memcache_version == cloud_memcache.MemcacheVersion.MEMCACHE_1_5 + assert response.state == cloud_memcache.Instance.State.CREATING + assert response.memcache_full_version == "memcache_full_version_value" + assert response.discovery_endpoint == "discovery_endpoint_value" + assert response.update_available is True @@ -881,7 +872,7 @@ def test_get_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -889,6 +880,7 @@ def test_get_instance_empty_call(): client.get_instance() call.assert_called() _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.GetInstanceRequest() @@ -897,7 +889,7 @@ async def test_get_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.GetInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -921,24 +913,36 @@ async def test_get_instance_async( update_available=True, ) ) + response = await client.get_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.GetInstanceRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloud_memcache.Instance) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.authorized_network == "authorized_network_value" + assert response.zones == ["zones_value"] + assert response.node_count == 1070 + assert response.memcache_version == cloud_memcache.MemcacheVersion.MEMCACHE_1_5 + assert response.state == cloud_memcache.Instance.State.CREATING + assert response.memcache_full_version == "memcache_full_version_value" + assert response.discovery_endpoint == "discovery_endpoint_value" + assert response.update_available is True @@ -948,17 +952,17 @@ async def test_get_instance_async_from_dict(): def test_get_instance_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = cloud_memcache.GetInstanceRequest() - request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_instance), "__call__") as call: call.return_value = cloud_memcache.Instance() + client.get_instance(request) # Establish that the underlying gRPC stub method was called. @@ -973,14 +977,11 @@ def test_get_instance_field_headers(): @pytest.mark.asyncio async def test_get_instance_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.GetInstanceRequest() - request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -988,6 +989,7 @@ async def test_get_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_memcache.Instance() ) + await client.get_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1001,12 +1003,13 @@ async def test_get_instance_field_headers_async(): def test_get_instance_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_memcache.Instance() + # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_instance(name="name_value",) @@ -1015,11 +1018,12 @@ def test_get_instance_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" def test_get_instance_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1031,9 +1035,7 @@ def test_get_instance_flattened_error(): @pytest.mark.asyncio async def test_get_instance_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_instance), "__call__") as call: @@ -1051,14 +1053,13 @@ async def test_get_instance_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1072,7 +1073,7 @@ def test_create_instance( transport: str = "grpc", request_type=cloud_memcache.CreateInstanceRequest ): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1083,11 +1084,13 @@ def test_create_instance( with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.CreateInstanceRequest() # Establish that the response is the type that we expect. @@ -1102,7 +1105,7 @@ def test_create_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1110,6 +1113,7 @@ def test_create_instance_empty_call(): client.create_instance() call.assert_called() _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.CreateInstanceRequest() @@ -1118,7 +1122,7 @@ async def test_create_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.CreateInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1131,11 +1135,13 @@ async def test_create_instance_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) + response = await client.create_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.CreateInstanceRequest() # Establish that the response is the type that we expect. 
@@ -1148,17 +1154,17 @@ async def test_create_instance_async_from_dict(): def test_create_instance_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.CreateInstanceRequest() - request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") + client.create_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1173,14 +1179,11 @@ def test_create_instance_field_headers(): @pytest.mark.asyncio async def test_create_instance_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.CreateInstanceRequest() - request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1188,6 +1191,7 @@ async def test_create_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) + await client.create_instance(request) # Establish that the underlying gRPC stub method was called. 
@@ -1201,12 +1205,13 @@ async def test_create_instance_field_headers_async(): def test_create_instance_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_instance( @@ -1219,13 +1224,16 @@ def test_create_instance_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + assert args[0].instance_id == "instance_id_value" + assert args[0].resource == cloud_memcache.Instance(name="name_value") def test_create_instance_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1240,9 +1248,7 @@ def test_create_instance_flattened_error(): @pytest.mark.asyncio async def test_create_instance_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_instance), "__call__") as call: @@ -1264,16 +1270,17 @@ async def test_create_instance_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + assert args[0].instance_id == "instance_id_value" + assert args[0].resource == cloud_memcache.Instance(name="name_value") @pytest.mark.asyncio async def test_create_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1290,7 +1297,7 @@ def test_update_instance( transport: str = "grpc", request_type=cloud_memcache.UpdateInstanceRequest ): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1301,11 +1308,13 @@ def test_update_instance( with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.UpdateInstanceRequest() # Establish that the response is the type that we expect. @@ -1320,7 +1329,7 @@ def test_update_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1328,6 +1337,7 @@ def test_update_instance_empty_call(): client.update_instance() call.assert_called() _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.UpdateInstanceRequest() @@ -1336,7 +1346,7 @@ async def test_update_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1349,11 +1359,13 @@ async def test_update_instance_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) + response = await client.update_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.UpdateInstanceRequest() # Establish that the response is the type that we expect. @@ -1366,17 +1378,17 @@ async def test_update_instance_async_from_dict(): def test_update_instance_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateInstanceRequest() - request.resource.name = "resource.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") + client.update_instance(request) # Establish that the underlying gRPC stub method was called. 
@@ -1393,14 +1405,11 @@ def test_update_instance_field_headers(): @pytest.mark.asyncio async def test_update_instance_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateInstanceRequest() - request.resource.name = "resource.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1408,6 +1417,7 @@ async def test_update_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) + await client.update_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1423,16 +1433,17 @@ async def test_update_instance_field_headers_async(): def test_update_instance_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_instance( - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + update_mask=field_mask.FieldMask(paths=["paths_value"]), resource=cloud_memcache.Instance(name="name_value"), ) @@ -1440,28 +1451,28 @@ def test_update_instance_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].resource == cloud_memcache.Instance(name="name_value") def test_update_instance_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.update_instance( cloud_memcache.UpdateInstanceRequest(), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + update_mask=field_mask.FieldMask(paths=["paths_value"]), resource=cloud_memcache.Instance(name="name_value"), ) @pytest.mark.asyncio async def test_update_instance_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_instance), "__call__") as call: @@ -1474,7 +1485,7 @@ async def test_update_instance_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_instance( - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + update_mask=field_mask.FieldMask(paths=["paths_value"]), resource=cloud_memcache.Instance(name="name_value"), ) @@ -1482,22 +1493,22 @@ async def test_update_instance_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].resource == cloud_memcache.Instance(name="name_value") @pytest.mark.asyncio async def test_update_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.update_instance( cloud_memcache.UpdateInstanceRequest(), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + update_mask=field_mask.FieldMask(paths=["paths_value"]), resource=cloud_memcache.Instance(name="name_value"), ) @@ -1506,7 +1517,7 @@ def test_update_parameters( transport: str = "grpc", request_type=cloud_memcache.UpdateParametersRequest ): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1519,11 +1530,13 @@ def test_update_parameters( ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_parameters(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.UpdateParametersRequest() # Establish that the response is the type that we expect. @@ -1538,7 +1551,7 @@ def test_update_parameters_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1548,6 +1561,7 @@ def test_update_parameters_empty_call(): client.update_parameters() call.assert_called() _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.UpdateParametersRequest() @@ -1556,7 +1570,7 @@ async def test_update_parameters_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateParametersRequest ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1571,11 +1585,13 @@ async def test_update_parameters_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) + response = await client.update_parameters(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.UpdateParametersRequest() # Establish that the response is the type that we expect. @@ -1588,12 +1604,11 @@ async def test_update_parameters_async_from_dict(): def test_update_parameters_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateParametersRequest() - request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1601,6 +1616,7 @@ def test_update_parameters_field_headers(): type(client.transport.update_parameters), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") + client.update_parameters(request) # Establish that the underlying gRPC stub method was called. @@ -1615,14 +1631,11 @@ def test_update_parameters_field_headers(): @pytest.mark.asyncio async def test_update_parameters_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateParametersRequest() - request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1632,6 +1645,7 @@ async def test_update_parameters_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) + await client.update_parameters(request) # Establish that the underlying gRPC stub method was called. @@ -1645,7 +1659,7 @@ async def test_update_parameters_field_headers_async(): def test_update_parameters_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1653,11 +1667,12 @@ def test_update_parameters_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.update_parameters( name="name_value", - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + update_mask=field_mask.FieldMask(paths=["paths_value"]), parameters=cloud_memcache.MemcacheParameters(id="id_value"), ) @@ -1665,13 +1680,16 @@ def test_update_parameters_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].parameters == cloud_memcache.MemcacheParameters(id="id_value") def test_update_parameters_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1679,16 +1697,14 @@ def test_update_parameters_flattened_error(): client.update_parameters( cloud_memcache.UpdateParametersRequest(), name="name_value", - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + update_mask=field_mask.FieldMask(paths=["paths_value"]), parameters=cloud_memcache.MemcacheParameters(id="id_value"), ) @pytest.mark.asyncio async def test_update_parameters_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1704,7 +1720,7 @@ async def test_update_parameters_flattened_async(): # using the keyword arguments to the method. 
response = await client.update_parameters( name="name_value", - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + update_mask=field_mask.FieldMask(paths=["paths_value"]), parameters=cloud_memcache.MemcacheParameters(id="id_value"), ) @@ -1712,16 +1728,17 @@ async def test_update_parameters_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].parameters == cloud_memcache.MemcacheParameters(id="id_value") @pytest.mark.asyncio async def test_update_parameters_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1729,7 +1746,7 @@ async def test_update_parameters_flattened_error_async(): await client.update_parameters( cloud_memcache.UpdateParametersRequest(), name="name_value", - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + update_mask=field_mask.FieldMask(paths=["paths_value"]), parameters=cloud_memcache.MemcacheParameters(id="id_value"), ) @@ -1738,7 +1755,7 @@ def test_delete_instance( transport: str = "grpc", request_type=cloud_memcache.DeleteInstanceRequest ): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1749,11 +1766,13 @@ def test_delete_instance( with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.DeleteInstanceRequest() # Establish that the response is the type that we expect. @@ -1768,7 +1787,7 @@ def test_delete_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1776,6 +1795,7 @@ def test_delete_instance_empty_call(): client.delete_instance() call.assert_called() _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.DeleteInstanceRequest() @@ -1784,7 +1804,7 @@ async def test_delete_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.DeleteInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1797,11 +1817,13 @@ async def test_delete_instance_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) + response = await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.DeleteInstanceRequest() # Establish that the response is the type that we expect. 
@@ -1814,17 +1836,17 @@ async def test_delete_instance_async_from_dict(): def test_delete_instance_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.DeleteInstanceRequest() - request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1839,14 +1861,11 @@ def test_delete_instance_field_headers(): @pytest.mark.asyncio async def test_delete_instance_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.DeleteInstanceRequest() - request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1854,6 +1873,7 @@ async def test_delete_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) + await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1867,12 +1887,13 @@ async def test_delete_instance_field_headers_async(): def test_delete_instance_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_instance(name="name_value",) @@ -1881,11 +1902,12 @@ def test_delete_instance_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" def test_delete_instance_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1897,9 +1919,7 @@ def test_delete_instance_flattened_error(): @pytest.mark.asyncio async def test_delete_instance_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: @@ -1917,14 +1937,13 @@ async def test_delete_instance_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1938,7 +1957,7 @@ def test_apply_parameters( transport: str = "grpc", request_type=cloud_memcache.ApplyParametersRequest ): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1949,11 +1968,13 @@ def test_apply_parameters( with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.apply_parameters(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.ApplyParametersRequest() # Establish that the response is the type that we expect. @@ -1968,7 +1989,7 @@ def test_apply_parameters_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1976,6 +1997,7 @@ def test_apply_parameters_empty_call(): client.apply_parameters() call.assert_called() _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.ApplyParametersRequest() @@ -1984,7 +2006,7 @@ async def test_apply_parameters_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.ApplyParametersRequest ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1997,11 +2019,13 @@ async def test_apply_parameters_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) + response = await client.apply_parameters(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.ApplyParametersRequest() # Establish that the response is the type that we expect. @@ -2014,17 +2038,17 @@ async def test_apply_parameters_async_from_dict(): def test_apply_parameters_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ApplyParametersRequest() - request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") + client.apply_parameters(request) # Establish that the underlying gRPC stub method was called. 
@@ -2039,14 +2063,11 @@ def test_apply_parameters_field_headers(): @pytest.mark.asyncio async def test_apply_parameters_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ApplyParametersRequest() - request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2054,6 +2075,7 @@ async def test_apply_parameters_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) + await client.apply_parameters(request) # Establish that the underlying gRPC stub method was called. @@ -2067,12 +2089,13 @@ async def test_apply_parameters_field_headers_async(): def test_apply_parameters_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.apply_parameters( @@ -2083,13 +2106,16 @@ def test_apply_parameters_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + assert args[0].node_ids == ["node_ids_value"] + assert args[0].apply_all == True def test_apply_parameters_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2104,9 +2130,7 @@ def test_apply_parameters_flattened_error(): @pytest.mark.asyncio async def test_apply_parameters_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: @@ -2126,16 +2150,17 @@ async def test_apply_parameters_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + assert args[0].node_ids == ["node_ids_value"] + assert args[0].apply_all == True @pytest.mark.asyncio async def test_apply_parameters_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -2152,7 +2177,7 @@ def test_apply_software_update( transport: str = "grpc", request_type=cloud_memcache.ApplySoftwareUpdateRequest ): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2165,11 +2190,13 @@ def test_apply_software_update( ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.apply_software_update(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.ApplySoftwareUpdateRequest() # Establish that the response is the type that we expect. @@ -2184,7 +2211,7 @@ def test_apply_software_update_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2194,6 +2221,7 @@ def test_apply_software_update_empty_call(): client.apply_software_update() call.assert_called() _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.ApplySoftwareUpdateRequest() @@ -2203,7 +2231,7 @@ async def test_apply_software_update_async( request_type=cloud_memcache.ApplySoftwareUpdateRequest, ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2218,11 +2246,13 @@ async def test_apply_software_update_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) + response = await client.apply_software_update(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.ApplySoftwareUpdateRequest() # Establish that the response is the type that we expect. @@ -2235,12 +2265,11 @@ async def test_apply_software_update_async_from_dict(): def test_apply_software_update_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ApplySoftwareUpdateRequest() - request.instance = "instance/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2248,6 +2277,7 @@ def test_apply_software_update_field_headers(): type(client.transport.apply_software_update), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") + client.apply_software_update(request) # Establish that the underlying gRPC stub method was called. 
@@ -2262,14 +2292,11 @@ def test_apply_software_update_field_headers(): @pytest.mark.asyncio async def test_apply_software_update_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ApplySoftwareUpdateRequest() - request.instance = "instance/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2279,6 +2306,7 @@ async def test_apply_software_update_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) + await client.apply_software_update(request) # Establish that the underlying gRPC stub method was called. @@ -2292,7 +2320,7 @@ async def test_apply_software_update_field_headers_async(): def test_apply_software_update_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2300,6 +2328,7 @@ def test_apply_software_update_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.apply_software_update( @@ -2310,13 +2339,16 @@ def test_apply_software_update_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + assert args[0].instance == "instance_value" + assert args[0].node_ids == ["node_ids_value"] + assert args[0].apply_all == True def test_apply_software_update_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2331,9 +2363,7 @@ def test_apply_software_update_flattened_error(): @pytest.mark.asyncio async def test_apply_software_update_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2355,16 +2385,17 @@ async def test_apply_software_update_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + assert args[0].instance == "instance_value" + assert args[0].node_ids == ["node_ids_value"] + assert args[0].apply_all == True @pytest.mark.asyncio async def test_apply_software_update_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2380,16 +2411,16 @@ async def test_apply_software_update_flattened_error_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.CloudMemcacheGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.CloudMemcacheGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = CloudMemcacheClient( @@ -2399,7 +2430,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.CloudMemcacheGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = CloudMemcacheClient( @@ -2410,7 +2441,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.CloudMemcacheGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=credentials.AnonymousCredentials(), ) client = CloudMemcacheClient(transport=transport) assert client.transport is transport @@ -2419,13 +2450,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.CloudMemcacheGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.CloudMemcacheGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -2440,23 +2471,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.CloudMemcacheGrpcTransport,) def test_cloud_memcache_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): + with pytest.raises(exceptions.DuplicateCredentialArgs): transport = transports.CloudMemcacheTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -2468,7 +2499,7 @@ def test_cloud_memcache_base_transport(): ) as Transport: Transport.return_value = None transport = transports.CloudMemcacheTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -2493,37 +2524,15 @@ def test_cloud_memcache_base_transport(): 
transport.operations_client -@requires_google_auth_gte_1_25_0 def test_cloud_memcache_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudMemcacheTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -@requires_google_auth_lt_1_25_0 -def test_cloud_memcache_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True + auth, "load_credentials_from_file" ) as load_creds, mock.patch( "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + load_creds.return_value = (credentials.AnonymousCredentials(), None) transport = transports.CloudMemcacheTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -2536,33 +2545,19 @@ def test_cloud_memcache_base_transport_with_credentials_file_old_google_auth(): def test_cloud_memcache_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + with mock.patch.object(auth, "default") as adc, mock.patch( "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (credentials.AnonymousCredentials(), None) transport = transports.CloudMemcacheTransport() adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_cloud_memcache_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - CloudMemcacheClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) - - -@requires_google_auth_lt_1_25_0 -def test_cloud_memcache_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) CloudMemcacheClient() adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), @@ -2570,156 +2565,20 @@ def test_cloud_memcache_auth_adc_old_google_auth(): ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudMemcacheGrpcTransport, - transports.CloudMemcacheGrpcAsyncIOTransport, - ], -) -@requires_google_auth_gte_1_25_0 -def test_cloud_memcache_transport_auth_adc(transport_class): +def test_cloud_memcache_transport_auth_adc(): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.CloudMemcacheGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudMemcacheGrpcTransport, - transports.CloudMemcacheGrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_cloud_memcache_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.CloudMemcacheGrpcTransport, grpc_helpers), - (transports.CloudMemcacheGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -@requires_api_core_gte_1_26_0 -def test_cloud_memcache_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "memcache.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=["1", "2"], - default_host="memcache.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.CloudMemcacheGrpcTransport, grpc_helpers), - (transports.CloudMemcacheGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -@requires_api_core_lt_1_26_0 -def test_cloud_memcache_transport_create_channel_old_api_core( - transport_class, grpc_helpers -): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus") - - create_channel.assert_called_with( - "memcache.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.CloudMemcacheGrpcTransport, grpc_helpers), - (transports.CloudMemcacheGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -@requires_api_core_lt_1_26_0 -def test_cloud_memcache_transport_create_channel_user_scopes( - transport_class, grpc_helpers -): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "memcache.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - scopes=["1", "2"], - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "transport_class", [ @@ -2728,7 +2587,7 @@ def test_cloud_memcache_transport_create_channel_user_scopes( ], ) def test_cloud_memcache_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = ga_credentials.AnonymousCredentials() + cred = credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -2767,7 +2626,7 @@ def test_cloud_memcache_grpc_transport_client_cert_source_for_mtls(transport_cla def test_cloud_memcache_host_no_port(): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="memcache.googleapis.com" ), @@ -2777,7 +2636,7 @@ def test_cloud_memcache_host_no_port(): def test_cloud_memcache_host_with_port(): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="memcache.googleapis.com:8000" ), @@ -2831,9 +2690,9 @@ def test_cloud_memcache_transport_channel_mtls_with_client_cert_source(transport mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = ga_credentials.AnonymousCredentials() + cred 
= credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: + with mock.patch.object(auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -2909,7 +2768,7 @@ def test_cloud_memcache_transport_channel_mtls_with_adc(transport_class): def test_cloud_memcache_grpc_lro_client(): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=credentials.AnonymousCredentials(), transport="grpc", ) transport = client.transport @@ -2922,7 +2781,7 @@ def test_cloud_memcache_grpc_lro_client(): def test_cloud_memcache_grpc_lro_async_client(): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client.transport @@ -2937,6 +2796,7 @@ def test_instance_path(): project = "squid" location = "clam" instance = "whelk" + expected = "projects/{project}/locations/{location}/instances/{instance}".format( project=project, location=location, instance=instance, ) @@ -2959,6 +2819,7 @@ def test_parse_instance_path(): def test_common_billing_account_path(): billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -2979,6 +2840,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "winkle" + expected = "folders/{folder}".format(folder=folder,) actual = CloudMemcacheClient.common_folder_path(folder) assert expected == actual @@ -2997,6 +2859,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "scallop" + expected = "organizations/{organization}".format(organization=organization,) actual = CloudMemcacheClient.common_organization_path(organization) assert expected == actual @@ -3015,6 +2878,7 @@ def 
test_parse_common_organization_path(): def test_common_project_path(): project = "squid" + expected = "projects/{project}".format(project=project,) actual = CloudMemcacheClient.common_project_path(project) assert expected == actual @@ -3034,6 +2898,7 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "whelk" location = "octopus" + expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -3060,7 +2925,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.CloudMemcacheTransport, "_prep_wrapped_messages" ) as prep: client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -3069,6 +2934,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = CloudMemcacheClient.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) From 829a7b7b0cfedb3a18a61158d7aa949b178ae4fe Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 20 May 2021 16:28:04 -0600 Subject: [PATCH 023/159] chore: release 1.0.0 (#62) Follow up to #59. PR is intentionally empty. 
Release-As: 1.0.0 From 97eaa477f306eca02ef43622c9700d7761ecf6c3 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 22 May 2021 09:28:08 +0000 Subject: [PATCH 024/159] chore: new owl bot post processor docker image (#64) gcr.io/repo-automation-bots/owlbot-python:latest@sha256:3c3a445b3ddc99ccd5d31edc4b4519729635d20693900db32c4f587ed51f7479 --- .github/.OwlBot.lock.yaml | 2 +- noxfile.py | 6 ++++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 864c176..46e3f02 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:4c981a6b6f2b8914a448d7b3a01688365be03e3ed26dfee399a6aa77fb112eaa + digest: sha256:3c3a445b3ddc99ccd5d31edc4b4519729635d20693900db32c4f587ed51f7479 diff --git a/noxfile.py b/noxfile.py index 04c230d..d896e7f 100644 --- a/noxfile.py +++ b/noxfile.py @@ -179,7 +179,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx", "alabaster", "recommonmark") + session.install("sphinx==4.0.1", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( @@ -201,7 +201,9 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") - session.install("sphinx", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml") + session.install( + "sphinx==4.0.1", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml" + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( From be4ad25218ce452a4485bc1444dd00e4ed9ceddd Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 28 May 2021 15:52:07 +0000 Subject: [PATCH 025/159] chore: new owl bot post processor docker image (#65) 
gcr.io/repo-automation-bots/owlbot-python:latest@sha256:0856ca711da1fd5ec9d6d7da6c50aa0bbf550fb94acb47b55159a640791987bf --- .github/.OwlBot.lock.yaml | 2 +- docs/multiprocessing.rst | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 46e3f02..127c2cd 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:3c3a445b3ddc99ccd5d31edc4b4519729635d20693900db32c4f587ed51f7479 + digest: sha256:0856ca711da1fd5ec9d6d7da6c50aa0bbf550fb94acb47b55159a640791987bf diff --git a/docs/multiprocessing.rst b/docs/multiprocessing.rst index 1cb29d4..536d17b 100644 --- a/docs/multiprocessing.rst +++ b/docs/multiprocessing.rst @@ -1,7 +1,7 @@ .. note:: - Because this client uses :mod:`grpcio` library, it is safe to + Because this client uses :mod:`grpc` library, it is safe to share instances across threads. In multiprocessing scenarios, the best practice is to create client instances *after* the invocation of - :func:`os.fork` by :class:`multiprocessing.Pool` or + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or :class:`multiprocessing.Process`. 
From 646673002e23a2582d134c88f6d8f8a6b985d4be Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 28 May 2021 18:28:03 +0000 Subject: [PATCH 026/159] chore: new owl bot post processor docker image (#66) Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:c66ba3c8d7bc8566f47df841f98cd0097b28fff0b1864c86f5817f4c8c3e8600 --- .github/.OwlBot.lock.yaml | 2 +- docs/conf.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 127c2cd..da616c9 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:0856ca711da1fd5ec9d6d7da6c50aa0bbf550fb94acb47b55159a640791987bf + digest: sha256:c66ba3c8d7bc8566f47df841f98cd0097b28fff0b1864c86f5817f4c8c3e8600 diff --git a/docs/conf.py b/docs/conf.py index 2559e64..7bc36e4 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -363,6 +363,7 @@ "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), } From 81a9a4e87610eb6f0ec593059dfdd5194367eb40 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 1 Jun 2021 10:56:01 +0000 Subject: [PATCH 027/159] chore: release 1.0.0 (#63) :robot: I have created a release \*beep\* \*boop\* --- ## [1.0.0](https://www.github.com/googleapis/python-memcache/compare/v0.3.0...v1.0.0) (2021-05-28) ### Features * bump release level to production/stable ([#59](https://www.github.com/googleapis/python-memcache/issues/59)) ([b8d9394](https://www.github.com/googleapis/python-memcache/commit/b8d9394dd34b97ddd68f8c73a5f516ba5294a70c)) * support self-signed JWT flow for 
service accounts ([2ad1bfb](https://www.github.com/googleapis/python-memcache/commit/2ad1bfbee1f847c1b150b0e1595faba63f42d768)) ### Bug Fixes * add async client to %name_%version/init.py ([2ad1bfb](https://www.github.com/googleapis/python-memcache/commit/2ad1bfbee1f847c1b150b0e1595faba63f42d768)) ### Miscellaneous Chores * release 1.0.0 ([#62](https://www.github.com/googleapis/python-memcache/issues/62)) ([829a7b7](https://www.github.com/googleapis/python-memcache/commit/829a7b7b0cfedb3a18a61158d7aa949b178ae4fe)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). --- CHANGELOG.md | 18 ++++++++++++++++++ setup.py | 2 +- 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1c6a712..cd1c4f3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,23 @@ # Changelog +## [1.0.0](https://www.github.com/googleapis/python-memcache/compare/v0.3.0...v1.0.0) (2021-05-28) + + +### Features + +* bump release level to production/stable ([#59](https://www.github.com/googleapis/python-memcache/issues/59)) ([b8d9394](https://www.github.com/googleapis/python-memcache/commit/b8d9394dd34b97ddd68f8c73a5f516ba5294a70c)) +* support self-signed JWT flow for service accounts ([2ad1bfb](https://www.github.com/googleapis/python-memcache/commit/2ad1bfbee1f847c1b150b0e1595faba63f42d768)) + + +### Bug Fixes + +* add async client to %name_%version/init.py ([2ad1bfb](https://www.github.com/googleapis/python-memcache/commit/2ad1bfbee1f847c1b150b0e1595faba63f42d768)) + + +### Miscellaneous Chores + +* release 1.0.0 ([#62](https://www.github.com/googleapis/python-memcache/issues/62)) ([829a7b7](https://www.github.com/googleapis/python-memcache/commit/829a7b7b0cfedb3a18a61158d7aa949b178ae4fe)) + ## [0.3.0](https://www.github.com/googleapis/python-memcache/compare/v0.2.0...v0.3.0) (2021-02-10) diff --git a/setup.py b/setup.py index 
e9985ee..f8353b2 100644 --- a/setup.py +++ b/setup.py @@ -19,7 +19,7 @@ import os import setuptools # type: ignore -version = "0.3.0" +version = "1.0.0" package_root = os.path.abspath(os.path.dirname(__file__)) From 4134a6237f21ed107da7980253470645b59d9dbd Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 17 Jun 2021 10:58:09 +0000 Subject: [PATCH 028/159] chore: new owl bot post processor docker image (#69) Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:58c7342b0bccf85028100adaa3d856cb4a871c22ca9c01960d996e66c40548ce --- .github/.OwlBot.lock.yaml | 2 +- docs/conf.py | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index da616c9..ea06d39 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:c66ba3c8d7bc8566f47df841f98cd0097b28fff0b1864c86f5817f4c8c3e8600 + digest: sha256:58c7342b0bccf85028100adaa3d856cb4a871c22ca9c01960d996e66c40548ce diff --git a/docs/conf.py b/docs/conf.py index 7bc36e4..c592f75 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -80,9 +80,9 @@ master_doc = "index" # General information about the project. 
-project = u"google-cloud-memcache" -copyright = u"2019, Google" -author = u"Google APIs" +project = "google-cloud-memcache" +copyright = "2019, Google" +author = "Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -281,7 +281,7 @@ ( master_doc, "google-cloud-memcache.tex", - u"google-cloud-memcache Documentation", + "google-cloud-memcache Documentation", author, "manual", ) @@ -316,7 +316,7 @@ ( master_doc, "google-cloud-memcache", - u"google-cloud-memcache Documentation", + "google-cloud-memcache Documentation", [author], 1, ) @@ -335,7 +335,7 @@ ( master_doc, "google-cloud-memcache", - u"google-cloud-memcache Documentation", + "google-cloud-memcache Documentation", author, "google-cloud-memcache", "google-cloud-memcache Library", From f273025fedad32be0b766e40ab99b445f529cd13 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 19 Jun 2021 01:54:05 +0000 Subject: [PATCH 029/159] docs: omit mention of Python 2.7 in 'CONTRIBUTING.rst' (#1127) (#70) Source-Link: https://github.com/googleapis/synthtool/commit/b91f129527853d5b756146a0b5044481fb4e09a8 Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:b6169fc6a5207b11800a7c002d0c5c2bc6d82697185ca12e666f44031468cfcd --- .github/.OwlBot.lock.yaml | 2 +- CONTRIBUTING.rst | 7 ++----- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index ea06d39..cc49c6a 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:58c7342b0bccf85028100adaa3d856cb4a871c22ca9c01960d996e66c40548ce + digest: sha256:b6169fc6a5207b11800a7c002d0c5c2bc6d82697185ca12e666f44031468cfcd diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index f45c3c2..a1bcedd 100644 --- 
a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -69,7 +69,6 @@ We use `nox `__ to instrument our tests. - To test your changes, run unit tests with ``nox``:: - $ nox -s unit-2.7 $ nox -s unit-3.8 $ ... @@ -144,7 +143,6 @@ Running System Tests # Run all system tests $ nox -s system-3.8 - $ nox -s system-2.7 # Run a single system test $ nox -s system-3.8 -- -k @@ -152,9 +150,8 @@ Running System Tests .. note:: - System tests are only configured to run under Python 2.7 and - Python 3.8. For expediency, we do not run them in older versions - of Python 3. + System tests are only configured to run under Python 3.8. + For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. You'll need to change some local auth settings and change some configuration in your project to From c3b27e6034576ce1d01f46485b02592be50ed02c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 20 Jun 2021 14:08:02 +0000 Subject: [PATCH 030/159] chore: update precommit hook pre-commit/pre-commit-hooks to v4 (#1083) (#72) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [pre-commit/pre-commit-hooks](https://togithub.com/pre-commit/pre-commit-hooks) | repository | major | `v3.4.0` -> `v4.0.1` | --- ### Release Notes
pre-commit/pre-commit-hooks ### [`v4.0.1`](https://togithub.com/pre-commit/pre-commit-hooks/releases/v4.0.1) [Compare Source](https://togithub.com/pre-commit/pre-commit-hooks/compare/v4.0.0...v4.0.1) ##### Fixes - `check-shebang-scripts-are-executable` fix entry point. - [#​602](https://togithub.com/pre-commit/pre-commit-hooks/issues/602) issue by [@​Person-93](https://togithub.com/Person-93). - [#​603](https://togithub.com/pre-commit/pre-commit-hooks/issues/603) PR by [@​scop](https://togithub.com/scop). ### [`v4.0.0`](https://togithub.com/pre-commit/pre-commit-hooks/releases/v4.0.0) [Compare Source](https://togithub.com/pre-commit/pre-commit-hooks/compare/v3.4.0...v4.0.0) ##### Features - `check-json`: report duplicate keys. - [#​558](https://togithub.com/pre-commit/pre-commit-hooks/issues/558) PR by [@​AdityaKhursale](https://togithub.com/AdityaKhursale). - [#​554](https://togithub.com/pre-commit/pre-commit-hooks/issues/554) issue by [@​adamchainz](https://togithub.com/adamchainz). - `no-commit-to-branch`: add `main` to default blocked branches. - [#​565](https://togithub.com/pre-commit/pre-commit-hooks/issues/565) PR by [@​ndevenish](https://togithub.com/ndevenish). - `check-case-conflict`: check conflicts in directory names as well. - [#​575](https://togithub.com/pre-commit/pre-commit-hooks/issues/575) PR by [@​slsyy](https://togithub.com/slsyy). - [#​70](https://togithub.com/pre-commit/pre-commit-hooks/issues/70) issue by [@​andyjack](https://togithub.com/andyjack). - `check-vcs-permalinks`: forbid other branch names. - [#​582](https://togithub.com/pre-commit/pre-commit-hooks/issues/582) PR by [@​jack1142](https://togithub.com/jack1142). - [#​581](https://togithub.com/pre-commit/pre-commit-hooks/issues/581) issue by [@​jack1142](https://togithub.com/jack1142). - `check-shebang-scripts-are-executable`: new hook which ensures shebang'd scripts are executable. 
- [#​545](https://togithub.com/pre-commit/pre-commit-hooks/issues/545) PR by [@​scop](https://togithub.com/scop). ##### Fixes - `check-executables-have-shebangs`: Short circuit shebang lookup on windows. - [#​544](https://togithub.com/pre-commit/pre-commit-hooks/issues/544) PR by [@​scop](https://togithub.com/scop). - `requirements-txt-fixer`: Fix comments which have indentation - [#​549](https://togithub.com/pre-commit/pre-commit-hooks/issues/549) PR by [@​greshilov](https://togithub.com/greshilov). - [#​548](https://togithub.com/pre-commit/pre-commit-hooks/issues/548) issue by [@​greshilov](https://togithub.com/greshilov). - `pretty-format-json`: write to stdout using UTF-8 encoding. - [#​571](https://togithub.com/pre-commit/pre-commit-hooks/issues/571) PR by [@​jack1142](https://togithub.com/jack1142). - [#​570](https://togithub.com/pre-commit/pre-commit-hooks/issues/570) issue by [@​jack1142](https://togithub.com/jack1142). - Use more inclusive language. - [#​599](https://togithub.com/pre-commit/pre-commit-hooks/issues/599) PR by [@​asottile](https://togithub.com/asottile). ##### Breaking changes - Remove deprecated hooks: `flake8`, `pyflakes`, `autopep8-wrapper`. - [#​597](https://togithub.com/pre-commit/pre-commit-hooks/issues/597) PR by [@​asottile](https://togithub.com/asottile).
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚩 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Renovate will not automatically rebase this PR, because other commits have been found. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/synthtool). Source-Link: https://github.com/googleapis/synthtool/commit/333fd90856f1454380514bc59fc0936cdaf1c202 Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:b8c131c558606d3cea6e18f8e87befbd448c1482319b0db3c5d5388fa6ea72e3 --- .github/.OwlBot.lock.yaml | 2 +- .pre-commit-config.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index cc49c6a..9602d54 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:b6169fc6a5207b11800a7c002d0c5c2bc6d82697185ca12e666f44031468cfcd + digest: sha256:b8c131c558606d3cea6e18f8e87befbd448c1482319b0db3c5d5388fa6ea72e3 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4f00c7c..62eb5a7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -16,7 +16,7 @@ # See https://pre-commit.com/hooks.html for more hooks repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.4.0 + rev: v4.0.1 hooks: - id: trailing-whitespace - id: end-of-file-fixer From add45bcd3c59dcba08f30acf5fc1e36541474182 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 22 Jun 2021 20:08:28 +0000 Subject: [PATCH 031/159] chore: add kokoro 3.9 config templates (#1128) 
(#74) Source-Link: https://github.com/googleapis/synthtool/commit/b0eb8a8b30b46a3c98d23c23107acb748c6601a1 Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:df50e8d462f86d6bcb42f27ecad55bb12c404f1c65de9c6fe4c4d25120080bd6 --- .github/.OwlBot.lock.yaml | 2 +- .kokoro/samples/python3.9/common.cfg | 40 +++++++++++++++++++++ .kokoro/samples/python3.9/continuous.cfg | 6 ++++ .kokoro/samples/python3.9/periodic-head.cfg | 11 ++++++ .kokoro/samples/python3.9/periodic.cfg | 6 ++++ .kokoro/samples/python3.9/presubmit.cfg | 6 ++++ 6 files changed, 70 insertions(+), 1 deletion(-) create mode 100644 .kokoro/samples/python3.9/common.cfg create mode 100644 .kokoro/samples/python3.9/continuous.cfg create mode 100644 .kokoro/samples/python3.9/periodic-head.cfg create mode 100644 .kokoro/samples/python3.9/periodic.cfg create mode 100644 .kokoro/samples/python3.9/presubmit.cfg diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 9602d54..0954585 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:b8c131c558606d3cea6e18f8e87befbd448c1482319b0db3c5d5388fa6ea72e3 + digest: sha256:df50e8d462f86d6bcb42f27ecad55bb12c404f1c65de9c6fe4c4d25120080bd6 diff --git a/.kokoro/samples/python3.9/common.cfg b/.kokoro/samples/python3.9/common.cfg new file mode 100644 index 0000000..7d86218 --- /dev/null +++ b/.kokoro/samples/python3.9/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.9" +} + +# Declare build specific Cloud project. 
+env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py39" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-memcache/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-memcache/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.9/continuous.cfg b/.kokoro/samples/python3.9/continuous.cfg new file mode 100644 index 0000000..a1c8d97 --- /dev/null +++ b/.kokoro/samples/python3.9/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.9/periodic-head.cfg b/.kokoro/samples/python3.9/periodic-head.cfg new file mode 100644 index 0000000..f9cfcd3 --- /dev/null +++ b/.kokoro/samples/python3.9/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.9/periodic.cfg b/.kokoro/samples/python3.9/periodic.cfg new file mode 100644 index 0000000..50fec96 --- /dev/null +++ b/.kokoro/samples/python3.9/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.9/presubmit.cfg 
b/.kokoro/samples/python3.9/presubmit.cfg new file mode 100644 index 0000000..a1c8d97 --- /dev/null +++ b/.kokoro/samples/python3.9/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file From f9131120fdf755b8bbb0f3ca28154d7cc15380d9 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 26 Jun 2021 11:40:09 +0000 Subject: [PATCH 032/159] chore(python): simplify nox steps in CONTRIBUTING.rst (#76) Source-Link: https://github.com/googleapis/synthtool/commit/26558bae8976a985d73c2d98c31d8612273f907d Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:99d90d097e4a4710cc8658ee0b5b963f4426d0e424819787c3ac1405c9a26719 --- .github/.OwlBot.lock.yaml | 2 +- CONTRIBUTING.rst | 14 ++++++-------- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 0954585..e2b39f9 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:df50e8d462f86d6bcb42f27ecad55bb12c404f1c65de9c6fe4c4d25120080bd6 + digest: sha256:99d90d097e4a4710cc8658ee0b5b963f4426d0e424819787c3ac1405c9a26719 diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index a1bcedd..9b2a1ca 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -68,14 +68,12 @@ Using ``nox`` We use `nox `__ to instrument our tests. - To test your changes, run unit tests with ``nox``:: + $ nox -s unit - $ nox -s unit-3.8 - $ ... +- To run a single unit test:: -- Args to pytest can be passed through the nox command separated by a `--`. For - example, to run a single test:: + $ nox -s unit-3.9 -- -k - $ nox -s unit-3.8 -- -k .. 
note:: @@ -142,7 +140,7 @@ Running System Tests - To run system tests, you can execute:: # Run all system tests - $ nox -s system-3.8 + $ nox -s system # Run a single system test $ nox -s system-3.8 -- -k @@ -215,8 +213,8 @@ Supported versions can be found in our ``noxfile.py`` `config`_. .. _config: https://github.com/googleapis/python-memcache/blob/master/noxfile.py -We also explicitly decided to support Python 3 beginning with version -3.6. Reasons for this include: +We also explicitly decided to support Python 3 beginning with version 3.6. +Reasons for this include: - Encouraging use of newest versions of Python 3 - Taking the lead of `prominent`_ open-source `projects`_ From 2d1aaf439d096857a727752ae129852b279c3658 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Sat, 26 Jun 2021 07:52:23 -0400 Subject: [PATCH 033/159] chore: upgrade gapic-generator-python to 0.46.3 (#67) This PR is the result of manually running the following commands in order to pull in the latest changes from googleapis-gen which includes gapic generator version 0.46.3: 1. Download the latest docker image for owlbot-cli ``` docker pull gcr.io/repo-automation-bots/owlbot-cli:latest ``` 2. Run the owlbot-cli to copy the generated client from googleapis/googleapis-gen ``` docker run --rm --user $(id -u):$(id -g) -v $(pwd):/repo -w /repo gcr.io/repo-automation-bots/owlbot-cli:latest copy-code ``` 3. Download the latest docker image for owlbot-python ``` docker pull gcr.io/repo-automation-bots/owlbot-python:latest ``` 4. 
Run the post processor image ``` docker run --user $(id -u):$(id -g) --rm -v $(pwd):/repo -w /repo gcr.io/repo-automation-bots/owlbot-python:latest ``` Other features/fixes include: fix: add async client to %name_%version/init.py chore: add autogenerated snippets chore: remove auth, policy, and options from the reserved names list feat: support self-signed JWT flow for service accounts chore: enable GAPIC metadata generation chore: sort subpackages in %namespace/%name/init.py --- docs/memcache_v1/cloud_memcache.rst | 1 - docs/memcache_v1beta2/cloud_memcache.rst | 1 - google/cloud/memcache/__init__.py | 12 +- google/cloud/memcache_v1/__init__.py | 9 +- google/cloud/memcache_v1/gapic_metadata.json | 93 +++ google/cloud/memcache_v1/services/__init__.py | 1 - .../services/cloud_memcache/__init__.py | 2 - .../services/cloud_memcache/async_client.py | 51 +- .../services/cloud_memcache/client.py | 94 +-- .../services/cloud_memcache/pagers.py | 4 +- .../cloud_memcache/transports/__init__.py | 2 - .../cloud_memcache/transports/base.py | 132 +++- .../cloud_memcache/transports/grpc.py | 44 +- .../cloud_memcache/transports/grpc_asyncio.py | 45 +- google/cloud/memcache_v1/types/__init__.py | 2 - .../cloud/memcache_v1/types/cloud_memcache.py | 151 ++-- google/cloud/memcache_v1beta2/__init__.py | 9 +- .../memcache_v1beta2/gapic_metadata.json | 103 +++ .../memcache_v1beta2/services/__init__.py | 1 - .../services/cloud_memcache/__init__.py | 2 - .../services/cloud_memcache/async_client.py | 54 +- .../services/cloud_memcache/client.py | 97 +-- .../services/cloud_memcache/pagers.py | 4 +- .../cloud_memcache/transports/__init__.py | 2 - .../cloud_memcache/transports/base.py | 136 ++-- .../cloud_memcache/transports/grpc.py | 50 +- .../cloud_memcache/transports/grpc_asyncio.py | 49 +- .../cloud/memcache_v1beta2/types/__init__.py | 2 - .../memcache_v1beta2/types/cloud_memcache.py | 166 ++--- scripts/fixup_memcache_v1_keywords.py | 19 +- scripts/fixup_memcache_v1beta2_keywords.py | 21 +- 
setup.py | 1 + testing/constraints-3.6.txt | 2 + tests/__init__.py | 15 + tests/unit/__init__.py | 15 + tests/unit/gapic/__init__.py | 15 + tests/unit/gapic/memcache_v1/__init__.py | 1 - .../gapic/memcache_v1/test_cloud_memcache.py | 615 +++++++++------- tests/unit/gapic/memcache_v1beta2/__init__.py | 1 - .../memcache_v1beta2/test_cloud_memcache.py | 657 +++++++++++------- 40 files changed, 1558 insertions(+), 1123 deletions(-) create mode 100644 google/cloud/memcache_v1/gapic_metadata.json create mode 100644 google/cloud/memcache_v1beta2/gapic_metadata.json create mode 100644 tests/__init__.py create mode 100644 tests/unit/__init__.py create mode 100644 tests/unit/gapic/__init__.py diff --git a/docs/memcache_v1/cloud_memcache.rst b/docs/memcache_v1/cloud_memcache.rst index 0c21866..35de375 100644 --- a/docs/memcache_v1/cloud_memcache.rst +++ b/docs/memcache_v1/cloud_memcache.rst @@ -5,7 +5,6 @@ CloudMemcache :members: :inherited-members: - .. automodule:: google.cloud.memcache_v1.services.cloud_memcache.pagers :members: :inherited-members: diff --git a/docs/memcache_v1beta2/cloud_memcache.rst b/docs/memcache_v1beta2/cloud_memcache.rst index b20fc3a..7dc7a82 100644 --- a/docs/memcache_v1beta2/cloud_memcache.rst +++ b/docs/memcache_v1beta2/cloud_memcache.rst @@ -5,7 +5,6 @@ CloudMemcache :members: :inherited-members: - .. automodule:: google.cloud.memcache_v1beta2.services.cloud_memcache.pagers :members: :inherited-members: diff --git a/google/cloud/memcache/__init__.py b/google/cloud/memcache/__init__.py index 4075bad..f2b7c14 100644 --- a/google/cloud/memcache/__init__.py +++ b/google/cloud/memcache/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,10 +14,11 @@ # limitations under the License. 
# +from google.cloud.memcache_v1.services.cloud_memcache.client import CloudMemcacheClient from google.cloud.memcache_v1.services.cloud_memcache.async_client import ( CloudMemcacheAsyncClient, ) -from google.cloud.memcache_v1.services.cloud_memcache.client import CloudMemcacheClient + from google.cloud.memcache_v1.types.cloud_memcache import ApplyParametersRequest from google.cloud.memcache_v1.types.cloud_memcache import CreateInstanceRequest from google.cloud.memcache_v1.types.cloud_memcache import DeleteInstanceRequest @@ -27,15 +27,15 @@ from google.cloud.memcache_v1.types.cloud_memcache import ListInstancesRequest from google.cloud.memcache_v1.types.cloud_memcache import ListInstancesResponse from google.cloud.memcache_v1.types.cloud_memcache import MemcacheParameters -from google.cloud.memcache_v1.types.cloud_memcache import MemcacheVersion from google.cloud.memcache_v1.types.cloud_memcache import OperationMetadata from google.cloud.memcache_v1.types.cloud_memcache import UpdateInstanceRequest from google.cloud.memcache_v1.types.cloud_memcache import UpdateParametersRequest +from google.cloud.memcache_v1.types.cloud_memcache import MemcacheVersion __all__ = ( - "ApplyParametersRequest", - "CloudMemcacheAsyncClient", "CloudMemcacheClient", + "CloudMemcacheAsyncClient", + "ApplyParametersRequest", "CreateInstanceRequest", "DeleteInstanceRequest", "GetInstanceRequest", @@ -43,8 +43,8 @@ "ListInstancesRequest", "ListInstancesResponse", "MemcacheParameters", - "MemcacheVersion", "OperationMetadata", "UpdateInstanceRequest", "UpdateParametersRequest", + "MemcacheVersion", ) diff --git a/google/cloud/memcache_v1/__init__.py b/google/cloud/memcache_v1/__init__.py index 4d28d1b..7d3016d 100644 --- a/google/cloud/memcache_v1/__init__.py +++ b/google/cloud/memcache_v1/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -16,6 +15,8 @@ # from .services.cloud_memcache import 
CloudMemcacheClient +from .services.cloud_memcache import CloudMemcacheAsyncClient + from .types.cloud_memcache import ApplyParametersRequest from .types.cloud_memcache import CreateInstanceRequest from .types.cloud_memcache import DeleteInstanceRequest @@ -24,14 +25,15 @@ from .types.cloud_memcache import ListInstancesRequest from .types.cloud_memcache import ListInstancesResponse from .types.cloud_memcache import MemcacheParameters -from .types.cloud_memcache import MemcacheVersion from .types.cloud_memcache import OperationMetadata from .types.cloud_memcache import UpdateInstanceRequest from .types.cloud_memcache import UpdateParametersRequest - +from .types.cloud_memcache import MemcacheVersion __all__ = ( + "CloudMemcacheAsyncClient", "ApplyParametersRequest", + "CloudMemcacheClient", "CreateInstanceRequest", "DeleteInstanceRequest", "GetInstanceRequest", @@ -43,5 +45,4 @@ "OperationMetadata", "UpdateInstanceRequest", "UpdateParametersRequest", - "CloudMemcacheClient", ) diff --git a/google/cloud/memcache_v1/gapic_metadata.json b/google/cloud/memcache_v1/gapic_metadata.json new file mode 100644 index 0000000..08d37fa --- /dev/null +++ b/google/cloud/memcache_v1/gapic_metadata.json @@ -0,0 +1,93 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.memcache_v1", + "protoPackage": "google.cloud.memcache.v1", + "schema": "1.0", + "services": { + "CloudMemcache": { + "clients": { + "grpc": { + "libraryClient": "CloudMemcacheClient", + "rpcs": { + "ApplyParameters": { + "methods": [ + "apply_parameters" + ] + }, + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + }, + "UpdateParameters": { + 
"methods": [ + "update_parameters" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CloudMemcacheAsyncClient", + "rpcs": { + "ApplyParameters": { + "methods": [ + "apply_parameters" + ] + }, + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + }, + "UpdateParameters": { + "methods": [ + "update_parameters" + ] + } + } + } + } + } + } +} diff --git a/google/cloud/memcache_v1/services/__init__.py b/google/cloud/memcache_v1/services/__init__.py index 42ffdf2..4de6597 100644 --- a/google/cloud/memcache_v1/services/__init__.py +++ b/google/cloud/memcache_v1/services/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/google/cloud/memcache_v1/services/cloud_memcache/__init__.py b/google/cloud/memcache_v1/services/cloud_memcache/__init__.py index 8524cb4..efb245e 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/__init__.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from .client import CloudMemcacheClient from .async_client import CloudMemcacheAsyncClient diff --git a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py index b09fddb..738a2db 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict import functools import re @@ -22,20 +20,19 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.memcache_v1.services.cloud_memcache import pagers from google.cloud.memcache_v1.types import cloud_memcache -from google.protobuf import empty_pb2 as empty # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import 
CloudMemcacheGrpcAsyncIOTransport from .client import CloudMemcacheClient @@ -70,31 +67,26 @@ class CloudMemcacheAsyncClient: instance_path = staticmethod(CloudMemcacheClient.instance_path) parse_instance_path = staticmethod(CloudMemcacheClient.parse_instance_path) - common_billing_account_path = staticmethod( CloudMemcacheClient.common_billing_account_path ) parse_common_billing_account_path = staticmethod( CloudMemcacheClient.parse_common_billing_account_path ) - common_folder_path = staticmethod(CloudMemcacheClient.common_folder_path) parse_common_folder_path = staticmethod( CloudMemcacheClient.parse_common_folder_path ) - common_organization_path = staticmethod( CloudMemcacheClient.common_organization_path ) parse_common_organization_path = staticmethod( CloudMemcacheClient.parse_common_organization_path ) - common_project_path = staticmethod(CloudMemcacheClient.common_project_path) parse_common_project_path = staticmethod( CloudMemcacheClient.parse_common_project_path ) - common_location_path = staticmethod(CloudMemcacheClient.common_location_path) parse_common_location_path = staticmethod( CloudMemcacheClient.parse_common_location_path @@ -102,7 +94,8 @@ class CloudMemcacheAsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -117,7 +110,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. 
Args: filename (str): The path to the service account private key json @@ -134,7 +127,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> CloudMemcacheTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: CloudMemcacheTransport: The transport used by the client instance. @@ -148,12 +141,12 @@ def transport(self) -> CloudMemcacheTransport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, CloudMemcacheTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the cloud memcache client. + """Instantiates the cloud memcache client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -185,7 +178,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = CloudMemcacheClient( credentials=credentials, transport=transport, @@ -217,7 +209,6 @@ async def list_instances( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -247,7 +238,6 @@ async def list_instances( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -301,7 +291,6 @@ async def get_instance( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -326,7 +315,6 @@ async def get_instance( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -398,7 +386,6 @@ async def create_instance( This corresponds to the ``instance_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -427,7 +414,6 @@ async def create_instance( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if instance is not None: @@ -468,7 +454,7 @@ async def update_instance( request: cloud_memcache.UpdateInstanceRequest = None, *, instance: cloud_memcache.Instance = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -495,7 +481,6 @@ async def update_instance( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -524,7 +509,6 @@ async def update_instance( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if instance is not None: request.instance = instance if update_mask is not None: @@ -565,7 +549,7 @@ async def update_parameters( request: cloud_memcache.UpdateParametersRequest = None, *, name: str = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, parameters: cloud_memcache.MemcacheParameters = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -600,7 +584,6 @@ async def update_parameters( This corresponds to the ``parameters`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -629,7 +612,6 @@ async def update_parameters( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if update_mask is not None: @@ -689,7 +671,6 @@ async def delete_instance( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -729,7 +710,6 @@ async def delete_instance( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -754,7 +734,7 @@ async def delete_instance( response = operation_async.from_gapic( response, self._client._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=cloud_memcache.OperationMetadata, ) @@ -806,7 +786,6 @@ async def apply_parameters( This corresponds to the ``apply_all`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -835,12 +814,10 @@ async def apply_parameters( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if apply_all is not None: request.apply_all = apply_all - if node_ids: request.node_ids.extend(node_ids) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py index 591ee49..3152eee 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from distutils import util import os @@ -23,10 +21,10 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore @@ -36,10 +34,9 @@ from google.api_core import operation_async # type: ignore from google.cloud.memcache_v1.services.cloud_memcache import pagers from google.cloud.memcache_v1.types import cloud_memcache -from google.protobuf import empty_pb2 as empty # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: 
ignore - +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO from .transports.grpc import CloudMemcacheGrpcTransport from .transports.grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport @@ -58,7 +55,7 @@ class CloudMemcacheClientMeta(type): _transport_registry["grpc_asyncio"] = CloudMemcacheGrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[CloudMemcacheTransport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -100,7 +97,8 @@ class CloudMemcacheClient(metaclass=CloudMemcacheClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -134,7 +132,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -151,7 +150,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -170,23 +169,24 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> CloudMemcacheTransport: - """Return the transport used by the client instance. 
+ """Returns the transport used by the client instance. Returns: - CloudMemcacheTransport: The transport used by the client instance. + CloudMemcacheTransport: The transport used by the client + instance. """ return self._transport @staticmethod def instance_path(project: str, location: str, instance: str,) -> str: - """Return a fully-qualified instance string.""" + """Returns a fully-qualified instance string.""" return "projects/{project}/locations/{location}/instances/{instance}".format( project=project, location=location, instance=instance, ) @staticmethod def parse_instance_path(path: str) -> Dict[str, str]: - """Parse a instance path into its component segments.""" + """Parses a instance path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$", path, @@ -195,7 +195,7 @@ def parse_instance_path(path: str) -> Dict[str, str]: @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -208,7 +208,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -219,7 +219,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -230,7 +230,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified 
project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -241,7 +241,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -255,12 +255,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, CloudMemcacheTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the cloud memcache client. + """Instantiates the cloud memcache client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -315,9 +315,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -329,12 +330,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. 
Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -349,8 +352,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -389,7 +392,6 @@ def list_instances( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -421,10 +423,8 @@ def list_instances( # there are no flattened fields. if not isinstance(request, cloud_memcache.ListInstancesRequest): request = cloud_memcache.ListInstancesRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -474,7 +474,6 @@ def get_instance( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -501,10 +500,8 @@ def get_instance( # there are no flattened fields. if not isinstance(request, cloud_memcache.GetInstanceRequest): request = cloud_memcache.GetInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -572,7 +569,6 @@ def create_instance( This corresponds to the ``instance_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -603,10 +599,8 @@ def create_instance( # there are no flattened fields. if not isinstance(request, cloud_memcache.CreateInstanceRequest): request = cloud_memcache.CreateInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if instance is not None: @@ -643,7 +637,7 @@ def update_instance( request: cloud_memcache.UpdateInstanceRequest = None, *, instance: cloud_memcache.Instance = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -670,7 +664,6 @@ def update_instance( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -701,10 +694,8 @@ def update_instance( # there are no flattened fields. if not isinstance(request, cloud_memcache.UpdateInstanceRequest): request = cloud_memcache.UpdateInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if instance is not None: request.instance = instance if update_mask is not None: @@ -741,7 +732,7 @@ def update_parameters( request: cloud_memcache.UpdateParametersRequest = None, *, name: str = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, parameters: cloud_memcache.MemcacheParameters = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -776,7 +767,6 @@ def update_parameters( This corresponds to the ``parameters`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. @@ -807,10 +797,8 @@ def update_parameters( # there are no flattened fields. if not isinstance(request, cloud_memcache.UpdateParametersRequest): request = cloud_memcache.UpdateParametersRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if update_mask is not None: @@ -866,7 +854,6 @@ def delete_instance( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -908,10 +895,8 @@ def delete_instance( # there are no flattened fields. if not isinstance(request, cloud_memcache.DeleteInstanceRequest): request = cloud_memcache.DeleteInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -932,7 +917,7 @@ def delete_instance( response = operation.from_gapic( response, self._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=cloud_memcache.OperationMetadata, ) @@ -984,7 +969,6 @@ def apply_parameters( This corresponds to the ``apply_all`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1015,10 +999,8 @@ def apply_parameters( # there are no flattened fields. if not isinstance(request, cloud_memcache.ApplyParametersRequest): request = cloud_memcache.ApplyParametersRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if name is not None: request.name = name if node_ids is not None: diff --git a/google/cloud/memcache_v1/services/cloud_memcache/pagers.py b/google/cloud/memcache_v1/services/cloud_memcache/pagers.py index 7a1324e..7723778 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/pagers.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -117,7 +115,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py index 38122c6..32ad848 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py index c9b57c7..393f376 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,21 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.api_core import operations_v1 # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.memcache_v1.types import cloud_memcache -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -37,27 +36,41 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except 
pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class CloudMemcacheTransport(abc.ABC): """Abstract transport class for CloudMemcache.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + DEFAULT_HOST: str = "memcache.googleapis.com" + def __init__( self, *, - host: str = "memcache.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -66,7 +79,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -80,29 +93,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. 
if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. 
+ + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -137,11 +197,11 @@ def operations_client(self) -> operations_v1.OperationsClient: @property def list_instances( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.ListInstancesRequest], - typing.Union[ + Union[ cloud_memcache.ListInstancesResponse, - typing.Awaitable[cloud_memcache.ListInstancesResponse], + Awaitable[cloud_memcache.ListInstancesResponse], ], ]: raise NotImplementedError() @@ -149,56 +209,54 @@ def list_instances( @property def get_instance( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.GetInstanceRequest], - typing.Union[ - cloud_memcache.Instance, typing.Awaitable[cloud_memcache.Instance] - ], + Union[cloud_memcache.Instance, Awaitable[cloud_memcache.Instance]], ]: raise NotImplementedError() @property def create_instance( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.CreateInstanceRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def update_instance( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.UpdateInstanceRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def update_parameters( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.UpdateParametersRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def delete_instance( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.DeleteInstanceRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def 
apply_parameters( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.ApplyParametersRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py index 61c7dde..5467640 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,22 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import operations_v1 # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.memcache_v1.types import cloud_memcache -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore from .base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO @@ -70,7 +67,7 @@ def __init__( self, *, host: str = "memcache.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: 
grpc.Channel = None, @@ -84,7 +81,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -195,7 +193,7 @@ def __init__( def create_channel( cls, host: str = "memcache.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -226,13 +224,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -313,7 +313,7 @@ def get_instance( @property def create_instance( self, - ) -> Callable[[cloud_memcache.CreateInstanceRequest], operations.Operation]: + ) -> Callable[[cloud_memcache.CreateInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the create instance method over gRPC. Creates a new Instance in a given location. 
@@ -332,14 +332,14 @@ def create_instance( self._stubs["create_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/CreateInstance", request_serializer=cloud_memcache.CreateInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_instance"] @property def update_instance( self, - ) -> Callable[[cloud_memcache.UpdateInstanceRequest], operations.Operation]: + ) -> Callable[[cloud_memcache.UpdateInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the update instance method over gRPC. Updates an existing Instance in a given project and @@ -359,14 +359,14 @@ def update_instance( self._stubs["update_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/UpdateInstance", request_serializer=cloud_memcache.UpdateInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_instance"] @property def update_parameters( self, - ) -> Callable[[cloud_memcache.UpdateParametersRequest], operations.Operation]: + ) -> Callable[[cloud_memcache.UpdateParametersRequest], operations_pb2.Operation]: r"""Return a callable for the update parameters method over gRPC. 
Updates the defined Memcached Parameters for an @@ -388,14 +388,14 @@ def update_parameters( self._stubs["update_parameters"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/UpdateParameters", request_serializer=cloud_memcache.UpdateParametersRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_parameters"] @property def delete_instance( self, - ) -> Callable[[cloud_memcache.DeleteInstanceRequest], operations.Operation]: + ) -> Callable[[cloud_memcache.DeleteInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the delete instance method over gRPC. Deletes a single Instance. @@ -414,14 +414,14 @@ def delete_instance( self._stubs["delete_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/DeleteInstance", request_serializer=cloud_memcache.DeleteInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["delete_instance"] @property def apply_parameters( self, - ) -> Callable[[cloud_memcache.ApplyParametersRequest], operations.Operation]: + ) -> Callable[[cloud_memcache.ApplyParametersRequest], operations_pb2.Operation]: r"""Return a callable for the apply parameters method over gRPC. 
ApplyParameters will restart the set of specified @@ -442,7 +442,7 @@ def apply_parameters( self._stubs["apply_parameters"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/ApplyParameters", request_serializer=cloud_memcache.ApplyParametersRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["apply_parameters"] diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py index d669536..b21cfd3 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,23 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore from google.api_core import operations_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.memcache_v1.types import cloud_memcache -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore from .base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO from .grpc import CloudMemcacheGrpcTransport @@ -73,7 +70,7 @@ class CloudMemcacheGrpcAsyncIOTransport(CloudMemcacheTransport): def create_channel( cls, host: str = "memcache.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -100,13 +97,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -114,7 +113,7 @@ def __init__( self, *, host: str = "memcache.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -128,7 +127,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -187,7 +187,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -323,7 +322,7 @@ def get_instance( def create_instance( self, ) -> Callable[ - [cloud_memcache.CreateInstanceRequest], Awaitable[operations.Operation] + [cloud_memcache.CreateInstanceRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the create instance method over gRPC. 
@@ -343,7 +342,7 @@ def create_instance( self._stubs["create_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/CreateInstance", request_serializer=cloud_memcache.CreateInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_instance"] @@ -351,7 +350,7 @@ def create_instance( def update_instance( self, ) -> Callable[ - [cloud_memcache.UpdateInstanceRequest], Awaitable[operations.Operation] + [cloud_memcache.UpdateInstanceRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the update instance method over gRPC. @@ -372,7 +371,7 @@ def update_instance( self._stubs["update_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/UpdateInstance", request_serializer=cloud_memcache.UpdateInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_instance"] @@ -380,7 +379,7 @@ def update_instance( def update_parameters( self, ) -> Callable[ - [cloud_memcache.UpdateParametersRequest], Awaitable[operations.Operation] + [cloud_memcache.UpdateParametersRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the update parameters method over gRPC. 
@@ -403,7 +402,7 @@ def update_parameters( self._stubs["update_parameters"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/UpdateParameters", request_serializer=cloud_memcache.UpdateParametersRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_parameters"] @@ -411,7 +410,7 @@ def update_parameters( def delete_instance( self, ) -> Callable[ - [cloud_memcache.DeleteInstanceRequest], Awaitable[operations.Operation] + [cloud_memcache.DeleteInstanceRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the delete instance method over gRPC. @@ -431,7 +430,7 @@ def delete_instance( self._stubs["delete_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/DeleteInstance", request_serializer=cloud_memcache.DeleteInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["delete_instance"] @@ -439,7 +438,7 @@ def delete_instance( def apply_parameters( self, ) -> Callable[ - [cloud_memcache.ApplyParametersRequest], Awaitable[operations.Operation] + [cloud_memcache.ApplyParametersRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the apply parameters method over gRPC. 
@@ -461,7 +460,7 @@ def apply_parameters( self._stubs["apply_parameters"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1.CloudMemcache/ApplyParameters", request_serializer=cloud_memcache.ApplyParametersRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["apply_parameters"] diff --git a/google/cloud/memcache_v1/types/__init__.py b/google/cloud/memcache_v1/types/__init__.py index 29acce6..2430991 100644 --- a/google/cloud/memcache_v1/types/__init__.py +++ b/google/cloud/memcache_v1/types/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .cloud_memcache import ( ApplyParametersRequest, CreateInstanceRequest, diff --git a/google/cloud/memcache_v1/types/cloud_memcache.py b/google/cloud/memcache_v1/types/cloud_memcache.py index a148657..2a6777e 100644 --- a/google/cloud/memcache_v1/types/cloud_memcache.py +++ b/google/cloud/memcache_v1/types/cloud_memcache.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,12 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -49,7 +46,6 @@ class MemcacheVersion(proto.Enum): class Instance(proto.Message): r""" - Attributes: name (str): Required. 
Unique name of the resource in this scope @@ -133,7 +129,6 @@ class State(proto.Enum): class NodeConfig(proto.Message): r"""Configuration for a Memcached Node. - Attributes: cpu_count (int): Required. Number of cpus per Memcached node. @@ -142,13 +137,11 @@ class NodeConfig(proto.Message): Memcached node. """ - cpu_count = proto.Field(proto.INT32, number=1) - - memory_size_mb = proto.Field(proto.INT32, number=2) + cpu_count = proto.Field(proto.INT32, number=1,) + memory_size_mb = proto.Field(proto.INT32, number=2,) class Node(proto.Message): r""" - Attributes: node_id (str): Output only. Identifier of the Memcached @@ -180,21 +173,15 @@ class State(proto.Enum): DELETING = 3 UPDATING = 4 - node_id = proto.Field(proto.STRING, number=1) - - zone = proto.Field(proto.STRING, number=2) - + node_id = proto.Field(proto.STRING, number=1,) + zone = proto.Field(proto.STRING, number=2,) state = proto.Field(proto.ENUM, number=3, enum="Instance.Node.State",) - - host = proto.Field(proto.STRING, number=4) - - port = proto.Field(proto.INT32, number=5) - + host = proto.Field(proto.STRING, number=4,) + port = proto.Field(proto.INT32, number=5,) parameters = proto.Field(proto.MESSAGE, number=6, message="MemcacheParameters",) class InstanceMessage(proto.Message): r""" - Attributes: code (google.cloud.memcache_v1.types.Instance.InstanceMessage.Code): A code that correspond to one type of user- @@ -210,42 +197,30 @@ class Code(proto.Enum): ZONE_DISTRIBUTION_UNBALANCED = 1 code = proto.Field(proto.ENUM, number=1, enum="Instance.InstanceMessage.Code",) - - message = proto.Field(proto.STRING, number=2) - - name = proto.Field(proto.STRING, number=1) - - display_name = proto.Field(proto.STRING, number=2) - - labels = proto.MapField(proto.STRING, proto.STRING, number=3) - - authorized_network = proto.Field(proto.STRING, number=4) - - zones = proto.RepeatedField(proto.STRING, number=5) - - node_count = proto.Field(proto.INT32, number=6) - + message = proto.Field(proto.STRING, number=2,) + + 
name = proto.Field(proto.STRING, number=1,) + display_name = proto.Field(proto.STRING, number=2,) + labels = proto.MapField(proto.STRING, proto.STRING, number=3,) + authorized_network = proto.Field(proto.STRING, number=4,) + zones = proto.RepeatedField(proto.STRING, number=5,) + node_count = proto.Field(proto.INT32, number=6,) node_config = proto.Field(proto.MESSAGE, number=7, message=NodeConfig,) - memcache_version = proto.Field(proto.ENUM, number=9, enum="MemcacheVersion",) - parameters = proto.Field(proto.MESSAGE, number=11, message="MemcacheParameters",) - memcache_nodes = proto.RepeatedField(proto.MESSAGE, number=12, message=Node,) - - create_time = proto.Field(proto.MESSAGE, number=13, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=14, message=timestamp.Timestamp,) - + create_time = proto.Field( + proto.MESSAGE, number=13, message=timestamp_pb2.Timestamp, + ) + update_time = proto.Field( + proto.MESSAGE, number=14, message=timestamp_pb2.Timestamp, + ) state = proto.Field(proto.ENUM, number=15, enum=State,) - - memcache_full_version = proto.Field(proto.STRING, number=18) - + memcache_full_version = proto.Field(proto.STRING, number=18,) instance_messages = proto.RepeatedField( proto.MESSAGE, number=19, message=InstanceMessage, ) - - discovery_endpoint = proto.Field(proto.STRING, number=20) + discovery_endpoint = proto.Field(proto.STRING, number=20,) class ListInstancesRequest(proto.Message): @@ -278,15 +253,11 @@ class ListInstancesRequest(proto.Message): "name desc" or "" (unsorted). 
""" - parent = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) - - filter = proto.Field(proto.STRING, number=4) - - order_by = proto.Field(proto.STRING, number=5) + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + filter = proto.Field(proto.STRING, number=4,) + order_by = proto.Field(proto.STRING, number=5,) class ListInstancesResponse(proto.Message): @@ -314,10 +285,8 @@ def raw_page(self): return self instances = proto.RepeatedField(proto.MESSAGE, number=1, message="Instance",) - - next_page_token = proto.Field(proto.STRING, number=2) - - unreachable = proto.RepeatedField(proto.STRING, number=3) + next_page_token = proto.Field(proto.STRING, number=2,) + unreachable = proto.RepeatedField(proto.STRING, number=3,) class GetInstanceRequest(proto.Message): @@ -331,7 +300,7 @@ class GetInstanceRequest(proto.Message): where ``location_id`` refers to a GCP region """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class CreateInstanceRequest(proto.Message): @@ -360,10 +329,8 @@ class CreateInstanceRequest(proto.Message): Required. A Memcached Instance """ - parent = proto.Field(proto.STRING, number=1) - - instance_id = proto.Field(proto.STRING, number=2) - + parent = proto.Field(proto.STRING, number=1,) + instance_id = proto.Field(proto.STRING, number=2,) instance = proto.Field(proto.MESSAGE, number=3, message="Instance",) @@ -381,8 +348,9 @@ class UpdateInstanceRequest(proto.Message): update_mask are updated. 
""" - update_mask = proto.Field(proto.MESSAGE, number=1, message=field_mask.FieldMask,) - + update_mask = proto.Field( + proto.MESSAGE, number=1, message=field_mask_pb2.FieldMask, + ) instance = proto.Field(proto.MESSAGE, number=2, message="Instance",) @@ -397,7 +365,7 @@ class DeleteInstanceRequest(proto.Message): where ``location_id`` refers to a GCP region """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class ApplyParametersRequest(proto.Message): @@ -420,11 +388,9 @@ class ApplyParametersRequest(proto.Message): nodes within the instance. """ - name = proto.Field(proto.STRING, number=1) - - node_ids = proto.RepeatedField(proto.STRING, number=2) - - apply_all = proto.Field(proto.BOOL, number=3) + name = proto.Field(proto.STRING, number=1,) + node_ids = proto.RepeatedField(proto.STRING, number=2,) + apply_all = proto.Field(proto.BOOL, number=3,) class UpdateParametersRequest(proto.Message): @@ -442,16 +408,15 @@ class UpdateParametersRequest(proto.Message): The parameters to apply to the instance. """ - name = proto.Field(proto.STRING, number=1) - - update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) - + name = proto.Field(proto.STRING, number=1,) + update_mask = proto.Field( + proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, + ) parameters = proto.Field(proto.MESSAGE, number=3, message="MemcacheParameters",) class MemcacheParameters(proto.Message): r""" - Attributes: id (str): Output only. The unique ID associated with @@ -465,14 +430,12 @@ class MemcacheParameters(proto.Message): memcached process. """ - id = proto.Field(proto.STRING, number=1) - - params = proto.MapField(proto.STRING, proto.STRING, number=3) + id = proto.Field(proto.STRING, number=1,) + params = proto.MapField(proto.STRING, proto.STRING, number=3,) class OperationMetadata(proto.Message): r"""Represents the metadata of a long-running operation. 
- Attributes: create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Time when the operation was @@ -500,19 +463,13 @@ class OperationMetadata(proto.Message): operation. """ - create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) - - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - - target = proto.Field(proto.STRING, number=3) - - verb = proto.Field(proto.STRING, number=4) - - status_detail = proto.Field(proto.STRING, number=5) - - cancel_requested = proto.Field(proto.BOOL, number=6) - - api_version = proto.Field(proto.STRING, number=7) + create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) + target = proto.Field(proto.STRING, number=3,) + verb = proto.Field(proto.STRING, number=4,) + status_detail = proto.Field(proto.STRING, number=5,) + cancel_requested = proto.Field(proto.BOOL, number=6,) + api_version = proto.Field(proto.STRING, number=7,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/memcache_v1beta2/__init__.py b/google/cloud/memcache_v1beta2/__init__.py index bac2393..54fa8a3 100644 --- a/google/cloud/memcache_v1beta2/__init__.py +++ b/google/cloud/memcache_v1beta2/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -16,6 +15,8 @@ # from .services.cloud_memcache import CloudMemcacheClient +from .services.cloud_memcache import CloudMemcacheAsyncClient + from .types.cloud_memcache import ApplyParametersRequest from .types.cloud_memcache import ApplySoftwareUpdateRequest from .types.cloud_memcache import CreateInstanceRequest @@ -26,16 +27,17 @@ from .types.cloud_memcache import ListInstancesResponse from .types.cloud_memcache import LocationMetadata from .types.cloud_memcache import MemcacheParameters -from .types.cloud_memcache import 
MemcacheVersion from .types.cloud_memcache import OperationMetadata from .types.cloud_memcache import UpdateInstanceRequest from .types.cloud_memcache import UpdateParametersRequest from .types.cloud_memcache import ZoneMetadata - +from .types.cloud_memcache import MemcacheVersion __all__ = ( + "CloudMemcacheAsyncClient", "ApplyParametersRequest", "ApplySoftwareUpdateRequest", + "CloudMemcacheClient", "CreateInstanceRequest", "DeleteInstanceRequest", "GetInstanceRequest", @@ -49,5 +51,4 @@ "UpdateInstanceRequest", "UpdateParametersRequest", "ZoneMetadata", - "CloudMemcacheClient", ) diff --git a/google/cloud/memcache_v1beta2/gapic_metadata.json b/google/cloud/memcache_v1beta2/gapic_metadata.json new file mode 100644 index 0000000..288ef89 --- /dev/null +++ b/google/cloud/memcache_v1beta2/gapic_metadata.json @@ -0,0 +1,103 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.memcache_v1beta2", + "protoPackage": "google.cloud.memcache.v1beta2", + "schema": "1.0", + "services": { + "CloudMemcache": { + "clients": { + "grpc": { + "libraryClient": "CloudMemcacheClient", + "rpcs": { + "ApplyParameters": { + "methods": [ + "apply_parameters" + ] + }, + "ApplySoftwareUpdate": { + "methods": [ + "apply_software_update" + ] + }, + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + }, + "UpdateParameters": { + "methods": [ + "update_parameters" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CloudMemcacheAsyncClient", + "rpcs": { + "ApplyParameters": { + "methods": [ + "apply_parameters" + ] + }, + "ApplySoftwareUpdate": { + "methods": [ + "apply_software_update" + ] + }, + "CreateInstance": { + 
"methods": [ + "create_instance" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + }, + "UpdateParameters": { + "methods": [ + "update_parameters" + ] + } + } + } + } + } + } +} diff --git a/google/cloud/memcache_v1beta2/services/__init__.py b/google/cloud/memcache_v1beta2/services/__init__.py index 42ffdf2..4de6597 100644 --- a/google/cloud/memcache_v1beta2/services/__init__.py +++ b/google/cloud/memcache_v1beta2/services/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py index 8524cb4..efb245e 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from .client import CloudMemcacheClient from .async_client import CloudMemcacheAsyncClient diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py index b687488..4417473 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict import functools import re @@ -22,20 +20,19 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.memcache_v1beta2.services.cloud_memcache import pagers from google.cloud.memcache_v1beta2.types import cloud_memcache -from google.protobuf import empty_pb2 as empty # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO from 
.transports.grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport from .client import CloudMemcacheClient @@ -70,31 +67,26 @@ class CloudMemcacheAsyncClient: instance_path = staticmethod(CloudMemcacheClient.instance_path) parse_instance_path = staticmethod(CloudMemcacheClient.parse_instance_path) - common_billing_account_path = staticmethod( CloudMemcacheClient.common_billing_account_path ) parse_common_billing_account_path = staticmethod( CloudMemcacheClient.parse_common_billing_account_path ) - common_folder_path = staticmethod(CloudMemcacheClient.common_folder_path) parse_common_folder_path = staticmethod( CloudMemcacheClient.parse_common_folder_path ) - common_organization_path = staticmethod( CloudMemcacheClient.common_organization_path ) parse_common_organization_path = staticmethod( CloudMemcacheClient.parse_common_organization_path ) - common_project_path = staticmethod(CloudMemcacheClient.common_project_path) parse_common_project_path = staticmethod( CloudMemcacheClient.parse_common_project_path ) - common_location_path = staticmethod(CloudMemcacheClient.common_location_path) parse_common_location_path = staticmethod( CloudMemcacheClient.parse_common_location_path @@ -102,7 +94,8 @@ class CloudMemcacheAsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -117,7 +110,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. 
Args: filename (str): The path to the service account private key json @@ -134,7 +127,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> CloudMemcacheTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: CloudMemcacheTransport: The transport used by the client instance. @@ -148,12 +141,12 @@ def transport(self) -> CloudMemcacheTransport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, CloudMemcacheTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the cloud memcache client. + """Instantiates the cloud memcache client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -185,7 +178,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = CloudMemcacheClient( credentials=credentials, transport=transport, @@ -217,7 +209,6 @@ async def list_instances( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -247,7 +238,6 @@ async def list_instances( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -301,7 +291,6 @@ async def get_instance( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -326,7 +315,6 @@ async def get_instance( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -398,7 +386,6 @@ async def create_instance( This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -428,7 +415,6 @@ async def create_instance( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if instance_id is not None: @@ -468,7 +454,7 @@ async def update_instance( self, request: cloud_memcache.UpdateInstanceRequest = None, *, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, resource: cloud_memcache.Instance = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -496,7 +482,6 @@ async def update_instance( This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -526,7 +511,6 @@ async def update_instance( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if update_mask is not None: request.update_mask = update_mask if resource is not None: @@ -567,7 +551,7 @@ async def update_parameters( request: cloud_memcache.UpdateParametersRequest = None, *, name: str = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, parameters: cloud_memcache.MemcacheParameters = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -602,7 +586,6 @@ async def update_parameters( This corresponds to the ``parameters`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -632,7 +615,6 @@ async def update_parameters( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if update_mask is not None: @@ -692,7 +674,6 @@ async def delete_instance( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -732,7 +713,6 @@ async def delete_instance( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -757,7 +737,7 @@ async def delete_instance( response = operation_async.from_gapic( response, self._client._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=cloud_memcache.OperationMetadata, ) @@ -807,7 +787,6 @@ async def apply_parameters( This corresponds to the ``apply_all`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -837,12 +816,10 @@ async def apply_parameters( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if apply_all is not None: request.apply_all = apply_all - if node_ids: request.node_ids.extend(node_ids) @@ -919,7 +896,6 @@ async def apply_software_update( This corresponds to the ``apply_all`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -949,12 +925,10 @@ async def apply_software_update( # If we have keyword arguments corresponding to fields on the # request, apply these. - if instance is not None: request.instance = instance if apply_all is not None: request.apply_all = apply_all - if node_ids: request.node_ids.extend(node_ids) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index 8d43719..65a2bc3 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict from distutils import util import os @@ -23,10 +21,10 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore @@ -36,10 +34,9 @@ from google.api_core import operation_async # type: ignore from google.cloud.memcache_v1beta2.services.cloud_memcache import pagers from google.cloud.memcache_v1beta2.types import cloud_memcache -from google.protobuf import empty_pb2 as empty # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO from .transports.grpc import CloudMemcacheGrpcTransport from .transports.grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport @@ -58,7 +55,7 @@ class CloudMemcacheClientMeta(type): _transport_registry["grpc_asyncio"] = CloudMemcacheGrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[CloudMemcacheTransport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. 
If none is @@ -100,7 +97,8 @@ class CloudMemcacheClient(metaclass=CloudMemcacheClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -134,7 +132,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -151,7 +150,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -170,23 +169,24 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> CloudMemcacheTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - CloudMemcacheTransport: The transport used by the client instance. + CloudMemcacheTransport: The transport used by the client + instance. 
""" return self._transport @staticmethod def instance_path(project: str, location: str, instance: str,) -> str: - """Return a fully-qualified instance string.""" + """Returns a fully-qualified instance string.""" return "projects/{project}/locations/{location}/instances/{instance}".format( project=project, location=location, instance=instance, ) @staticmethod def parse_instance_path(path: str) -> Dict[str, str]: - """Parse a instance path into its component segments.""" + """Parses a instance path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$", path, @@ -195,7 +195,7 @@ def parse_instance_path(path: str) -> Dict[str, str]: @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -208,7 +208,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -219,7 +219,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -230,7 +230,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -241,7 +241,7 @@ def parse_common_project_path(path: str) -> 
Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -255,12 +255,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, CloudMemcacheTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the cloud memcache client. + """Instantiates the cloud memcache client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -315,9 +315,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -329,12 +330,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. 
@@ -349,8 +352,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -389,7 +392,6 @@ def list_instances( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -421,10 +423,8 @@ def list_instances( # there are no flattened fields. if not isinstance(request, cloud_memcache.ListInstancesRequest): request = cloud_memcache.ListInstancesRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -474,7 +474,6 @@ def get_instance( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -501,10 +500,8 @@ def get_instance( # there are no flattened fields. if not isinstance(request, cloud_memcache.GetInstanceRequest): request = cloud_memcache.GetInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -572,7 +569,6 @@ def create_instance( This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -604,10 +600,8 @@ def create_instance( # there are no flattened fields. 
if not isinstance(request, cloud_memcache.CreateInstanceRequest): request = cloud_memcache.CreateInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if instance_id is not None: @@ -643,7 +637,7 @@ def update_instance( self, request: cloud_memcache.UpdateInstanceRequest = None, *, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, resource: cloud_memcache.Instance = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -671,7 +665,6 @@ def update_instance( This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -703,10 +696,8 @@ def update_instance( # there are no flattened fields. if not isinstance(request, cloud_memcache.UpdateInstanceRequest): request = cloud_memcache.UpdateInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if update_mask is not None: request.update_mask = update_mask if resource is not None: @@ -743,7 +734,7 @@ def update_parameters( request: cloud_memcache.UpdateParametersRequest = None, *, name: str = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, parameters: cloud_memcache.MemcacheParameters = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -778,7 +769,6 @@ def update_parameters( This corresponds to the ``parameters`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -810,10 +800,8 @@ def update_parameters( # there are no flattened fields. 
if not isinstance(request, cloud_memcache.UpdateParametersRequest): request = cloud_memcache.UpdateParametersRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if update_mask is not None: @@ -869,7 +857,6 @@ def delete_instance( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -911,10 +898,8 @@ def delete_instance( # there are no flattened fields. if not isinstance(request, cloud_memcache.DeleteInstanceRequest): request = cloud_memcache.DeleteInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -935,7 +920,7 @@ def delete_instance( response = operation.from_gapic( response, self._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=cloud_memcache.OperationMetadata, ) @@ -985,7 +970,6 @@ def apply_parameters( This corresponds to the ``apply_all`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1017,10 +1001,8 @@ def apply_parameters( # there are no flattened fields. if not isinstance(request, cloud_memcache.ApplyParametersRequest): request = cloud_memcache.ApplyParametersRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if node_ids is not None: @@ -1097,7 +1079,6 @@ def apply_software_update( This corresponds to the ``apply_all`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1129,10 +1110,8 @@ def apply_software_update( # there are no flattened fields. if not isinstance(request, cloud_memcache.ApplySoftwareUpdateRequest): request = cloud_memcache.ApplySoftwareUpdateRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if instance is not None: request.instance = instance if node_ids is not None: diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py index 5b69afd..381459d 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -117,7 +115,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py index 38122c6..32ad848 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py index a1b9e2c..1fb1292 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,21 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.api_core import operations_v1 # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.memcache_v1beta2.types import cloud_memcache -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -37,27 +36,41 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + 
except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class CloudMemcacheTransport(abc.ABC): """Abstract transport class for CloudMemcache.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + DEFAULT_HOST: str = "memcache.googleapis.com" + def __init__( self, *, - host: str = "memcache.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -66,7 +79,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -80,29 +93,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. 
if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. 
+ + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -142,11 +202,11 @@ def operations_client(self) -> operations_v1.OperationsClient: @property def list_instances( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.ListInstancesRequest], - typing.Union[ + Union[ cloud_memcache.ListInstancesResponse, - typing.Awaitable[cloud_memcache.ListInstancesResponse], + Awaitable[cloud_memcache.ListInstancesResponse], ], ]: raise NotImplementedError() @@ -154,65 +214,63 @@ def list_instances( @property def get_instance( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.GetInstanceRequest], - typing.Union[ - cloud_memcache.Instance, typing.Awaitable[cloud_memcache.Instance] - ], + Union[cloud_memcache.Instance, Awaitable[cloud_memcache.Instance]], ]: raise NotImplementedError() @property def create_instance( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.CreateInstanceRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def update_instance( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.UpdateInstanceRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def update_parameters( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.UpdateParametersRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def delete_instance( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.DeleteInstanceRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def 
apply_parameters( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.ApplyParametersRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def apply_software_update( self, - ) -> typing.Callable[ + ) -> Callable[ [cloud_memcache.ApplySoftwareUpdateRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py index f5f202f..cf61dee 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,22 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import operations_v1 # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.memcache_v1beta2.types import cloud_memcache -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore from .base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO @@ -70,7 +67,7 @@ def __init__( self, *, host: str = "memcache.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -84,7 +81,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -195,7 +193,7 @@ def __init__( def create_channel( cls, host: str = "memcache.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -226,13 +224,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -313,7 +313,7 @@ def get_instance( @property def create_instance( self, - ) -> Callable[[cloud_memcache.CreateInstanceRequest], operations.Operation]: + ) -> Callable[[cloud_memcache.CreateInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the create instance method over gRPC. Creates a new Instance in a given location. @@ -332,14 +332,14 @@ def create_instance( self._stubs["create_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/CreateInstance", request_serializer=cloud_memcache.CreateInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_instance"] @property def update_instance( self, - ) -> Callable[[cloud_memcache.UpdateInstanceRequest], operations.Operation]: + ) -> Callable[[cloud_memcache.UpdateInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the update instance method over gRPC. 
Updates an existing Instance in a given project and @@ -359,14 +359,14 @@ def update_instance( self._stubs["update_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/UpdateInstance", request_serializer=cloud_memcache.UpdateInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_instance"] @property def update_parameters( self, - ) -> Callable[[cloud_memcache.UpdateParametersRequest], operations.Operation]: + ) -> Callable[[cloud_memcache.UpdateParametersRequest], operations_pb2.Operation]: r"""Return a callable for the update parameters method over gRPC. Updates the defined Memcached parameters for an existing @@ -388,14 +388,14 @@ def update_parameters( self._stubs["update_parameters"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/UpdateParameters", request_serializer=cloud_memcache.UpdateParametersRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_parameters"] @property def delete_instance( self, - ) -> Callable[[cloud_memcache.DeleteInstanceRequest], operations.Operation]: + ) -> Callable[[cloud_memcache.DeleteInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the delete instance method over gRPC. Deletes a single Instance. 
@@ -414,14 +414,14 @@ def delete_instance( self._stubs["delete_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/DeleteInstance", request_serializer=cloud_memcache.DeleteInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["delete_instance"] @property def apply_parameters( self, - ) -> Callable[[cloud_memcache.ApplyParametersRequest], operations.Operation]: + ) -> Callable[[cloud_memcache.ApplyParametersRequest], operations_pb2.Operation]: r"""Return a callable for the apply parameters method over gRPC. ``ApplyParameters`` restarts the set of specified nodes in order @@ -442,14 +442,16 @@ def apply_parameters( self._stubs["apply_parameters"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/ApplyParameters", request_serializer=cloud_memcache.ApplyParametersRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["apply_parameters"] @property def apply_software_update( self, - ) -> Callable[[cloud_memcache.ApplySoftwareUpdateRequest], operations.Operation]: + ) -> Callable[ + [cloud_memcache.ApplySoftwareUpdateRequest], operations_pb2.Operation + ]: r"""Return a callable for the apply software update method over gRPC. 
Updates software on the selected nodes of the @@ -469,7 +471,7 @@ def apply_software_update( self._stubs["apply_software_update"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/ApplySoftwareUpdate", request_serializer=cloud_memcache.ApplySoftwareUpdateRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["apply_software_update"] diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py index f2452b4..e5a9f15 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,23 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore from google.api_core import operations_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.memcache_v1beta2.types import cloud_memcache -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore from .base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO from .grpc import CloudMemcacheGrpcTransport @@ -73,7 +70,7 @@ class CloudMemcacheGrpcAsyncIOTransport(CloudMemcacheTransport): def create_channel( cls, host: str = "memcache.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -100,13 +97,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -114,7 +113,7 @@ def __init__( self, *, host: str = "memcache.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -128,7 +127,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -187,7 +187,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -323,7 +322,7 @@ def get_instance( def create_instance( self, ) -> Callable[ - [cloud_memcache.CreateInstanceRequest], Awaitable[operations.Operation] + [cloud_memcache.CreateInstanceRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the create instance method over gRPC. 
@@ -343,7 +342,7 @@ def create_instance( self._stubs["create_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/CreateInstance", request_serializer=cloud_memcache.CreateInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_instance"] @@ -351,7 +350,7 @@ def create_instance( def update_instance( self, ) -> Callable[ - [cloud_memcache.UpdateInstanceRequest], Awaitable[operations.Operation] + [cloud_memcache.UpdateInstanceRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the update instance method over gRPC. @@ -372,7 +371,7 @@ def update_instance( self._stubs["update_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/UpdateInstance", request_serializer=cloud_memcache.UpdateInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_instance"] @@ -380,7 +379,7 @@ def update_instance( def update_parameters( self, ) -> Callable[ - [cloud_memcache.UpdateParametersRequest], Awaitable[operations.Operation] + [cloud_memcache.UpdateParametersRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the update parameters method over gRPC. 
@@ -403,7 +402,7 @@ def update_parameters( self._stubs["update_parameters"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/UpdateParameters", request_serializer=cloud_memcache.UpdateParametersRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_parameters"] @@ -411,7 +410,7 @@ def update_parameters( def delete_instance( self, ) -> Callable[ - [cloud_memcache.DeleteInstanceRequest], Awaitable[operations.Operation] + [cloud_memcache.DeleteInstanceRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the delete instance method over gRPC. @@ -431,7 +430,7 @@ def delete_instance( self._stubs["delete_instance"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/DeleteInstance", request_serializer=cloud_memcache.DeleteInstanceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["delete_instance"] @@ -439,7 +438,7 @@ def delete_instance( def apply_parameters( self, ) -> Callable[ - [cloud_memcache.ApplyParametersRequest], Awaitable[operations.Operation] + [cloud_memcache.ApplyParametersRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the apply parameters method over gRPC. 
@@ -461,7 +460,7 @@ def apply_parameters( self._stubs["apply_parameters"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/ApplyParameters", request_serializer=cloud_memcache.ApplyParametersRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["apply_parameters"] @@ -469,7 +468,7 @@ def apply_parameters( def apply_software_update( self, ) -> Callable[ - [cloud_memcache.ApplySoftwareUpdateRequest], Awaitable[operations.Operation] + [cloud_memcache.ApplySoftwareUpdateRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the apply software update method over gRPC. @@ -490,7 +489,7 @@ def apply_software_update( self._stubs["apply_software_update"] = self.grpc_channel.unary_unary( "/google.cloud.memcache.v1beta2.CloudMemcache/ApplySoftwareUpdate", request_serializer=cloud_memcache.ApplySoftwareUpdateRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["apply_software_update"] diff --git a/google/cloud/memcache_v1beta2/types/__init__.py b/google/cloud/memcache_v1beta2/types/__init__.py index 90cf3eb..a4e788a 100644 --- a/google/cloud/memcache_v1beta2/types/__init__.py +++ b/google/cloud/memcache_v1beta2/types/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from .cloud_memcache import ( ApplyParametersRequest, ApplySoftwareUpdateRequest, diff --git a/google/cloud/memcache_v1beta2/types/cloud_memcache.py b/google/cloud/memcache_v1beta2/types/cloud_memcache.py index b7e9a43..6fdd3b8 100644 --- a/google/cloud/memcache_v1beta2/types/cloud_memcache.py +++ b/google/cloud/memcache_v1beta2/types/cloud_memcache.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,12 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -52,7 +49,6 @@ class MemcacheVersion(proto.Enum): class Instance(proto.Message): r"""A Memorystore for Memcached instance - Attributes: name (str): Required. Unique name of the resource in this scope @@ -142,7 +138,6 @@ class State(proto.Enum): class NodeConfig(proto.Message): r"""Configuration for a Memcached Node. - Attributes: cpu_count (int): Required. Number of cpus per Memcached node. @@ -151,13 +146,11 @@ class NodeConfig(proto.Message): Memcached node. """ - cpu_count = proto.Field(proto.INT32, number=1) - - memory_size_mb = proto.Field(proto.INT32, number=2) + cpu_count = proto.Field(proto.INT32, number=1,) + memory_size_mb = proto.Field(proto.INT32, number=2,) class Node(proto.Message): r""" - Attributes: node_id (str): Output only. 
Identifier of the Memcached @@ -192,23 +185,16 @@ class State(proto.Enum): DELETING = 3 UPDATING = 4 - node_id = proto.Field(proto.STRING, number=1) - - zone = proto.Field(proto.STRING, number=2) - + node_id = proto.Field(proto.STRING, number=1,) + zone = proto.Field(proto.STRING, number=2,) state = proto.Field(proto.ENUM, number=3, enum="Instance.Node.State",) - - host = proto.Field(proto.STRING, number=4) - - port = proto.Field(proto.INT32, number=5) - + host = proto.Field(proto.STRING, number=4,) + port = proto.Field(proto.INT32, number=5,) parameters = proto.Field(proto.MESSAGE, number=6, message="MemcacheParameters",) - - update_available = proto.Field(proto.BOOL, number=7) + update_available = proto.Field(proto.BOOL, number=7,) class InstanceMessage(proto.Message): r""" - Attributes: code (google.cloud.memcache_v1beta2.types.Instance.InstanceMessage.Code): A code that correspond to one type of user- @@ -224,44 +210,31 @@ class Code(proto.Enum): ZONE_DISTRIBUTION_UNBALANCED = 1 code = proto.Field(proto.ENUM, number=1, enum="Instance.InstanceMessage.Code",) - - message = proto.Field(proto.STRING, number=2) - - name = proto.Field(proto.STRING, number=1) - - display_name = proto.Field(proto.STRING, number=2) - - labels = proto.MapField(proto.STRING, proto.STRING, number=3) - - authorized_network = proto.Field(proto.STRING, number=4) - - zones = proto.RepeatedField(proto.STRING, number=5) - - node_count = proto.Field(proto.INT32, number=6) - + message = proto.Field(proto.STRING, number=2,) + + name = proto.Field(proto.STRING, number=1,) + display_name = proto.Field(proto.STRING, number=2,) + labels = proto.MapField(proto.STRING, proto.STRING, number=3,) + authorized_network = proto.Field(proto.STRING, number=4,) + zones = proto.RepeatedField(proto.STRING, number=5,) + node_count = proto.Field(proto.INT32, number=6,) node_config = proto.Field(proto.MESSAGE, number=7, message=NodeConfig,) - memcache_version = proto.Field(proto.ENUM, number=9, 
enum="MemcacheVersion",) - parameters = proto.Field(proto.MESSAGE, number=11, message="MemcacheParameters",) - memcache_nodes = proto.RepeatedField(proto.MESSAGE, number=12, message=Node,) - - create_time = proto.Field(proto.MESSAGE, number=13, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=14, message=timestamp.Timestamp,) - + create_time = proto.Field( + proto.MESSAGE, number=13, message=timestamp_pb2.Timestamp, + ) + update_time = proto.Field( + proto.MESSAGE, number=14, message=timestamp_pb2.Timestamp, + ) state = proto.Field(proto.ENUM, number=15, enum=State,) - - memcache_full_version = proto.Field(proto.STRING, number=18) - + memcache_full_version = proto.Field(proto.STRING, number=18,) instance_messages = proto.RepeatedField( proto.MESSAGE, number=19, message=InstanceMessage, ) - - discovery_endpoint = proto.Field(proto.STRING, number=20) - - update_available = proto.Field(proto.BOOL, number=21) + discovery_endpoint = proto.Field(proto.STRING, number=20,) + update_available = proto.Field(proto.BOOL, number=21,) class ListInstancesRequest(proto.Message): @@ -294,15 +267,11 @@ class ListInstancesRequest(proto.Message): "name desc" or "" (unsorted). 
""" - parent = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) - - filter = proto.Field(proto.STRING, number=4) - - order_by = proto.Field(proto.STRING, number=5) + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + filter = proto.Field(proto.STRING, number=4,) + order_by = proto.Field(proto.STRING, number=5,) class ListInstancesResponse(proto.Message): @@ -330,10 +299,8 @@ def raw_page(self): return self resources = proto.RepeatedField(proto.MESSAGE, number=1, message="Instance",) - - next_page_token = proto.Field(proto.STRING, number=2) - - unreachable = proto.RepeatedField(proto.STRING, number=3) + next_page_token = proto.Field(proto.STRING, number=2,) + unreachable = proto.RepeatedField(proto.STRING, number=3,) class GetInstanceRequest(proto.Message): @@ -347,7 +314,7 @@ class GetInstanceRequest(proto.Message): where ``location_id`` refers to a GCP region """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class CreateInstanceRequest(proto.Message): @@ -376,10 +343,8 @@ class CreateInstanceRequest(proto.Message): Required. A Memcached [Instance] resource """ - parent = proto.Field(proto.STRING, number=1) - - instance_id = proto.Field(proto.STRING, number=2) - + parent = proto.Field(proto.STRING, number=1,) + instance_id = proto.Field(proto.STRING, number=2,) resource = proto.Field(proto.MESSAGE, number=3, message="Instance",) @@ -397,8 +362,9 @@ class UpdateInstanceRequest(proto.Message): specified in update_mask are updated. 
""" - update_mask = proto.Field(proto.MESSAGE, number=1, message=field_mask.FieldMask,) - + update_mask = proto.Field( + proto.MESSAGE, number=1, message=field_mask_pb2.FieldMask, + ) resource = proto.Field(proto.MESSAGE, number=2, message="Instance",) @@ -413,7 +379,7 @@ class DeleteInstanceRequest(proto.Message): where ``location_id`` refers to a GCP region """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class ApplyParametersRequest(proto.Message): @@ -435,11 +401,9 @@ class ApplyParametersRequest(proto.Message): within the instance. """ - name = proto.Field(proto.STRING, number=1) - - node_ids = proto.RepeatedField(proto.STRING, number=2) - - apply_all = proto.Field(proto.BOOL, number=3) + name = proto.Field(proto.STRING, number=1,) + node_ids = proto.RepeatedField(proto.STRING, number=2,) + apply_all = proto.Field(proto.BOOL, number=3,) class UpdateParametersRequest(proto.Message): @@ -457,10 +421,10 @@ class UpdateParametersRequest(proto.Message): The parameters to apply to the instance. """ - name = proto.Field(proto.STRING, number=1) - - update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) - + name = proto.Field(proto.STRING, number=1,) + update_mask = proto.Field( + proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, + ) parameters = proto.Field(proto.MESSAGE, number=3, message="MemcacheParameters",) @@ -485,11 +449,9 @@ class ApplySoftwareUpdateRequest(proto.Message): instance. """ - instance = proto.Field(proto.STRING, number=1) - - node_ids = proto.RepeatedField(proto.STRING, number=2) - - apply_all = proto.Field(proto.BOOL, number=3) + instance = proto.Field(proto.STRING, number=1,) + node_ids = proto.RepeatedField(proto.STRING, number=2,) + apply_all = proto.Field(proto.BOOL, number=3,) class MemcacheParameters(proto.Message): @@ -507,14 +469,12 @@ class MemcacheParameters(proto.Message): memcached process. 
""" - id = proto.Field(proto.STRING, number=1) - - params = proto.MapField(proto.STRING, proto.STRING, number=3) + id = proto.Field(proto.STRING, number=1,) + params = proto.MapField(proto.STRING, proto.STRING, number=3,) class OperationMetadata(proto.Message): r"""Represents the metadata of a long-running operation. - Attributes: create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Time when the operation was @@ -542,19 +502,13 @@ class OperationMetadata(proto.Message): operation. """ - create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) - - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - - target = proto.Field(proto.STRING, number=3) - - verb = proto.Field(proto.STRING, number=4) - - status_detail = proto.Field(proto.STRING, number=5) - - cancel_requested = proto.Field(proto.BOOL, number=6) - - api_version = proto.Field(proto.STRING, number=7) + create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) + target = proto.Field(proto.STRING, number=3,) + verb = proto.Field(proto.STRING, number=4,) + status_detail = proto.Field(proto.STRING, number=5,) + cancel_requested = proto.Field(proto.BOOL, number=6,) + api_version = proto.Field(proto.STRING, number=7,) class LocationMetadata(proto.Message): @@ -575,7 +529,7 @@ class LocationMetadata(proto.Message): class ZoneMetadata(proto.Message): - r"""""" + r""" """ __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/scripts/fixup_memcache_v1_keywords.py b/scripts/fixup_memcache_v1_keywords.py index eac442a..774b03f 100644 --- a/scripts/fixup_memcache_v1_keywords.py +++ b/scripts/fixup_memcache_v1_keywords.py @@ -1,6 +1,5 @@ #! 
/usr/bin/env python3 # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import argparse import os import libcst as cst @@ -41,14 +39,13 @@ def partition( class memcacheCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'apply_parameters': ('name', 'node_ids', 'apply_all', ), - 'create_instance': ('parent', 'instance_id', 'instance', ), - 'delete_instance': ('name', ), - 'get_instance': ('name', ), - 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'update_instance': ('update_mask', 'instance', ), - 'update_parameters': ('name', 'update_mask', 'parameters', ), - + 'apply_parameters': ('name', 'node_ids', 'apply_all', ), + 'create_instance': ('parent', 'instance_id', 'instance', ), + 'delete_instance': ('name', ), + 'get_instance': ('name', ), + 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'update_instance': ('update_mask', 'instance', ), + 'update_parameters': ('name', 'update_mask', 'parameters', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -79,7 +76,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: value=cst.Dict([ cst.DictElement( cst.SimpleString("'{}'".format(name)), - cst.Element(value=arg.value) +cst.Element(value=arg.value) ) # Note: the args + kwargs looks silly, but keep in mind that # the control parameters had to be stripped out, and that diff --git a/scripts/fixup_memcache_v1beta2_keywords.py b/scripts/fixup_memcache_v1beta2_keywords.py index 4267b6f..afe0f42 100644 --- a/scripts/fixup_memcache_v1beta2_keywords.py +++ b/scripts/fixup_memcache_v1beta2_keywords.py @@ -1,6 +1,5 @@ #! 
/usr/bin/env python3 # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import argparse import os import libcst as cst @@ -41,15 +39,14 @@ def partition( class memcacheCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'apply_parameters': ('name', 'node_ids', 'apply_all', ), - 'apply_software_update': ('instance', 'node_ids', 'apply_all', ), - 'create_instance': ('parent', 'instance_id', 'resource', ), - 'delete_instance': ('name', ), - 'get_instance': ('name', ), - 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'update_instance': ('update_mask', 'resource', ), - 'update_parameters': ('name', 'update_mask', 'parameters', ), - + 'apply_parameters': ('name', 'node_ids', 'apply_all', ), + 'apply_software_update': ('instance', 'node_ids', 'apply_all', ), + 'create_instance': ('parent', 'instance_id', 'resource', ), + 'delete_instance': ('name', ), + 'get_instance': ('name', ), + 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'update_instance': ('update_mask', 'resource', ), + 'update_parameters': ('name', 'update_mask', 'parameters', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -80,7 +77,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: value=cst.Dict([ cst.DictElement( cst.SimpleString("'{}'".format(name)), - cst.Element(value=arg.value) +cst.Element(value=arg.value) ) # Note: the args + kwargs looks silly, but keep in mind that # the control parameters had to be stripped out, and that diff --git a/setup.py b/setup.py index f8353b2..0490fba 100644 --- a/setup.py +++ b/setup.py @@ -42,6 +42,7 @@ install_requires=( "google-api-core[grpc] >= 1.22.2, 
< 2.0.0dev", "proto-plus >= 1.4.0", + "packaging >= 14.3", ), python_requires=">=3.6", classifiers=[ diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index a929701..3d61f06 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -7,3 +7,5 @@ # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.22.2 proto-plus==1.15.0 +packaging==14.3 +google-auth==1.24.0 # TODO: remove when google-auth>=1.25.0 is required through google-api-core diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..4de6597 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 0000000..4de6597 --- /dev/null +++ b/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py new file mode 100644 index 0000000..4de6597 --- /dev/null +++ b/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/memcache_v1/__init__.py b/tests/unit/gapic/memcache_v1/__init__.py index 42ffdf2..4de6597 100644 --- a/tests/unit/gapic/memcache_v1/__init__.py +++ b/tests/unit/gapic/memcache_v1/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py index b4793ec..4970a18 100644 --- a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,26 +23,56 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import future from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.memcache_v1.services.cloud_memcache import CloudMemcacheAsyncClient from google.cloud.memcache_v1.services.cloud_memcache import CloudMemcacheClient from google.cloud.memcache_v1.services.cloud_memcache import pagers from google.cloud.memcache_v1.services.cloud_memcache import transports +from google.cloud.memcache_v1.services.cloud_memcache.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.memcache_v1.services.cloud_memcache.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.memcache_v1.types import cloud_memcache from google.longrunning import operations_pb2 from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -94,7 +123,7 @@ def test__get_default_mtls_endpoint(): "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] ) def test_cloud_memcache_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -111,7 +140,7 @@ def test_cloud_memcache_client_from_service_account_info(client_class): "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] ) def test_cloud_memcache_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -164,7 +193,7 @@ def test_cloud_memcache_client_client_options( ): # Check that if channel is provided we won't create a new one. 
with mock.patch.object(CloudMemcacheClient, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -452,7 +481,7 @@ def test_list_instances( transport: str = "grpc", request_type=cloud_memcache.ListInstancesRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -465,21 +494,16 @@ def test_list_instances( call.return_value = cloud_memcache.ListInstancesResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) - response = client.list_instances(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ListInstancesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @@ -491,7 +515,7 @@ def test_list_instances_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -499,7 +523,6 @@ def test_list_instances_empty_call(): client.list_instances() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ListInstancesRequest() @@ -508,7 +531,7 @@ async def test_list_instances_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.ListInstancesRequest ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -524,20 +547,16 @@ async def test_list_instances_async( unreachable=["unreachable_value"], ) ) - response = await client.list_instances(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ListInstancesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListInstancesAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @@ -547,17 +566,17 @@ async def test_list_instances_async_from_dict(): def test_list_instances_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ListInstancesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: call.return_value = cloud_memcache.ListInstancesResponse() - client.list_instances(request) # Establish that the underlying gRPC stub method was called. 
@@ -572,11 +591,14 @@ def test_list_instances_field_headers(): @pytest.mark.asyncio async def test_list_instances_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ListInstancesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -584,7 +606,6 @@ async def test_list_instances_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_memcache.ListInstancesResponse() ) - await client.list_instances(request) # Establish that the underlying gRPC stub method was called. @@ -598,13 +619,12 @@ async def test_list_instances_field_headers_async(): def test_list_instances_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_memcache.ListInstancesResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_instances(parent="parent_value",) @@ -613,12 +633,11 @@ def test_list_instances_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_instances_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -630,7 +649,9 @@ def test_list_instances_flattened_error(): @pytest.mark.asyncio async def test_list_instances_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -648,13 +669,14 @@ async def test_list_instances_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_instances_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -665,7 +687,7 @@ async def test_list_instances_flattened_error_async(): def test_list_instances_pager(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials,) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -703,7 +725,7 @@ def test_list_instances_pager(): def test_list_instances_pages(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials,) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -733,7 +755,7 @@ def test_list_instances_pages(): @pytest.mark.asyncio async def test_list_instances_async_pager(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials,) + client = CloudMemcacheAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -770,7 +792,7 @@ async def test_list_instances_async_pager(): @pytest.mark.asyncio async def test_list_instances_async_pages(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials,) + client = CloudMemcacheAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -806,7 +828,7 @@ def test_get_instance( transport: str = "grpc", request_type=cloud_memcache.GetInstanceRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -827,35 +849,23 @@ def test_get_instance( memcache_full_version="memcache_full_version_value", discovery_endpoint="discovery_endpoint_value", ) - response = client.get_instance(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.GetInstanceRequest() # Establish that the response is the type that we expect. - assert isinstance(response, cloud_memcache.Instance) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.authorized_network == "authorized_network_value" - assert response.zones == ["zones_value"] - assert response.node_count == 1070 - assert response.memcache_version == cloud_memcache.MemcacheVersion.MEMCACHE_1_5 - assert response.state == cloud_memcache.Instance.State.CREATING - assert response.memcache_full_version == "memcache_full_version_value" - assert response.discovery_endpoint == "discovery_endpoint_value" @@ -867,7 +877,7 @@ def test_get_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -875,7 +885,6 @@ def test_get_instance_empty_call(): client.get_instance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.GetInstanceRequest() @@ -884,7 +893,7 @@ async def test_get_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.GetInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -907,34 +916,23 @@ async def test_get_instance_async( discovery_endpoint="discovery_endpoint_value", ) ) - response = await client.get_instance(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.GetInstanceRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloud_memcache.Instance) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.authorized_network == "authorized_network_value" - assert response.zones == ["zones_value"] - assert response.node_count == 1070 - assert response.memcache_version == cloud_memcache.MemcacheVersion.MEMCACHE_1_5 - assert response.state == cloud_memcache.Instance.State.CREATING - assert response.memcache_full_version == "memcache_full_version_value" - assert response.discovery_endpoint == "discovery_endpoint_value" @@ -944,17 +942,17 @@ async def test_get_instance_async_from_dict(): def test_get_instance_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.GetInstanceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_instance), "__call__") as call: call.return_value = cloud_memcache.Instance() - client.get_instance(request) # Establish that the underlying gRPC stub method was called. @@ -969,11 +967,14 @@ def test_get_instance_field_headers(): @pytest.mark.asyncio async def test_get_instance_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = cloud_memcache.GetInstanceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -981,7 +982,6 @@ async def test_get_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_memcache.Instance() ) - await client.get_instance(request) # Establish that the underlying gRPC stub method was called. @@ -995,13 +995,12 @@ async def test_get_instance_field_headers_async(): def test_get_instance_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_memcache.Instance() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_instance(name="name_value",) @@ -1010,12 +1009,11 @@ def test_get_instance_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_instance_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1027,7 +1025,9 @@ def test_get_instance_flattened_error(): @pytest.mark.asyncio async def test_get_instance_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_instance), "__call__") as call: @@ -1045,13 +1045,14 @@ async def test_get_instance_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1065,7 +1066,7 @@ def test_create_instance( transport: str = "grpc", request_type=cloud_memcache.CreateInstanceRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1076,13 +1077,11 @@ def test_create_instance( with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.CreateInstanceRequest() # Establish that the response is the type that we expect. @@ -1097,7 +1096,7 @@ def test_create_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1105,7 +1104,6 @@ def test_create_instance_empty_call(): client.create_instance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.CreateInstanceRequest() @@ -1114,7 +1112,7 @@ async def test_create_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.CreateInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1127,13 +1125,11 @@ async def test_create_instance_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.create_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.CreateInstanceRequest() # Establish that the response is the type that we expect. @@ -1146,17 +1142,17 @@ async def test_create_instance_async_from_dict(): def test_create_instance_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.CreateInstanceRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.create_instance(request) # Establish that the underlying gRPC stub method was called. 
@@ -1171,11 +1167,14 @@ def test_create_instance_field_headers(): @pytest.mark.asyncio async def test_create_instance_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.CreateInstanceRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1183,7 +1182,6 @@ async def test_create_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.create_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1197,13 +1195,12 @@ async def test_create_instance_field_headers_async(): def test_create_instance_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_instance( @@ -1216,16 +1213,13 @@ def test_create_instance_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].instance == cloud_memcache.Instance(name="name_value") - assert args[0].instance_id == "instance_id_value" def test_create_instance_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1240,7 +1234,9 @@ def test_create_instance_flattened_error(): @pytest.mark.asyncio async def test_create_instance_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_instance), "__call__") as call: @@ -1262,17 +1258,16 @@ async def test_create_instance_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].instance == cloud_memcache.Instance(name="name_value") - assert args[0].instance_id == "instance_id_value" @pytest.mark.asyncio async def test_create_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1289,7 +1284,7 @@ def test_update_instance( transport: str = "grpc", request_type=cloud_memcache.UpdateInstanceRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1300,13 +1295,11 @@ def test_update_instance( with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateInstanceRequest() # Establish that the response is the type that we expect. @@ -1321,7 +1314,7 @@ def test_update_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1329,7 +1322,6 @@ def test_update_instance_empty_call(): client.update_instance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateInstanceRequest() @@ -1338,7 +1330,7 @@ async def test_update_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1351,13 +1343,11 @@ async def test_update_instance_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.update_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateInstanceRequest() # Establish that the response is the type that we expect. @@ -1370,17 +1360,17 @@ async def test_update_instance_async_from_dict(): def test_update_instance_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateInstanceRequest() + request.instance.name = "instance.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.update_instance(request) # Establish that the underlying gRPC stub method was called. 
@@ -1397,11 +1387,14 @@ def test_update_instance_field_headers(): @pytest.mark.asyncio async def test_update_instance_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateInstanceRequest() + request.instance.name = "instance.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1409,7 +1402,6 @@ async def test_update_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.update_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1425,32 +1417,29 @@ async def test_update_instance_field_headers_async(): def test_update_instance_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_instance( instance=cloud_memcache.Instance(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].instance == cloud_memcache.Instance(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_instance_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1458,13 +1447,15 @@ def test_update_instance_flattened_error(): client.update_instance( cloud_memcache.UpdateInstanceRequest(), instance=cloud_memcache.Instance(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_instance_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_instance), "__call__") as call: @@ -1478,22 +1469,22 @@ async def test_update_instance_flattened_async(): # using the keyword arguments to the method. response = await client.update_instance( instance=cloud_memcache.Instance(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].instance == cloud_memcache.Instance(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1501,7 +1492,7 @@ async def test_update_instance_flattened_error_async(): await client.update_instance( cloud_memcache.UpdateInstanceRequest(), instance=cloud_memcache.Instance(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -1509,7 +1500,7 @@ def test_update_parameters( transport: str = "grpc", request_type=cloud_memcache.UpdateParametersRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1522,13 +1513,11 @@ def test_update_parameters( ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_parameters(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateParametersRequest() # Establish that the response is the type that we expect. @@ -1543,7 +1532,7 @@ def test_update_parameters_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1553,7 +1542,6 @@ def test_update_parameters_empty_call(): client.update_parameters() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateParametersRequest() @@ -1562,7 +1550,7 @@ async def test_update_parameters_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateParametersRequest ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1577,13 +1565,11 @@ async def test_update_parameters_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.update_parameters(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateParametersRequest() # Establish that the response is the type that we expect. @@ -1596,11 +1582,12 @@ async def test_update_parameters_async_from_dict(): def test_update_parameters_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateParametersRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1608,7 +1595,6 @@ def test_update_parameters_field_headers(): type(client.transport.update_parameters), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.update_parameters(request) # Establish that the underlying gRPC stub method was called. @@ -1623,11 +1609,14 @@ def test_update_parameters_field_headers(): @pytest.mark.asyncio async def test_update_parameters_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateParametersRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1637,7 +1626,6 @@ async def test_update_parameters_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.update_parameters(request) # Establish that the underlying gRPC stub method was called. @@ -1651,7 +1639,7 @@ async def test_update_parameters_field_headers_async(): def test_update_parameters_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1659,12 +1647,11 @@ def test_update_parameters_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.update_parameters( name="name_value", - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), parameters=cloud_memcache.MemcacheParameters(id="id_value"), ) @@ -1672,16 +1659,13 @@ def test_update_parameters_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) - + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) assert args[0].parameters == cloud_memcache.MemcacheParameters(id="id_value") def test_update_parameters_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1689,14 +1673,16 @@ def test_update_parameters_flattened_error(): client.update_parameters( cloud_memcache.UpdateParametersRequest(), name="name_value", - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), parameters=cloud_memcache.MemcacheParameters(id="id_value"), ) @pytest.mark.asyncio async def test_update_parameters_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1712,7 +1698,7 @@ async def test_update_parameters_flattened_async(): # using the keyword arguments to the method. 
response = await client.update_parameters( name="name_value", - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), parameters=cloud_memcache.MemcacheParameters(id="id_value"), ) @@ -1720,17 +1706,16 @@ async def test_update_parameters_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) - + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) assert args[0].parameters == cloud_memcache.MemcacheParameters(id="id_value") @pytest.mark.asyncio async def test_update_parameters_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1738,7 +1723,7 @@ async def test_update_parameters_flattened_error_async(): await client.update_parameters( cloud_memcache.UpdateParametersRequest(), name="name_value", - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), parameters=cloud_memcache.MemcacheParameters(id="id_value"), ) @@ -1747,7 +1732,7 @@ def test_delete_instance( transport: str = "grpc", request_type=cloud_memcache.DeleteInstanceRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1758,13 +1743,11 @@ def test_delete_instance( with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.DeleteInstanceRequest() # Establish that the response is the type that we expect. @@ -1779,7 +1762,7 @@ def test_delete_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1787,7 +1770,6 @@ def test_delete_instance_empty_call(): client.delete_instance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.DeleteInstanceRequest() @@ -1796,7 +1778,7 @@ async def test_delete_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.DeleteInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1809,13 +1791,11 @@ async def test_delete_instance_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.DeleteInstanceRequest() # Establish that the response is the type that we expect. 
@@ -1828,17 +1808,17 @@ async def test_delete_instance_async_from_dict(): def test_delete_instance_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.DeleteInstanceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1853,11 +1833,14 @@ def test_delete_instance_field_headers(): @pytest.mark.asyncio async def test_delete_instance_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.DeleteInstanceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1865,7 +1848,6 @@ async def test_delete_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1879,13 +1861,12 @@ async def test_delete_instance_field_headers_async(): def test_delete_instance_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_instance(name="name_value",) @@ -1894,12 +1875,11 @@ def test_delete_instance_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_instance_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1911,7 +1891,9 @@ def test_delete_instance_flattened_error(): @pytest.mark.asyncio async def test_delete_instance_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: @@ -1929,13 +1911,14 @@ async def test_delete_instance_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1949,7 +1932,7 @@ def test_apply_parameters( transport: str = "grpc", request_type=cloud_memcache.ApplyParametersRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1960,13 +1943,11 @@ def test_apply_parameters( with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.apply_parameters(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ApplyParametersRequest() # Establish that the response is the type that we expect. @@ -1981,7 +1962,7 @@ def test_apply_parameters_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1989,7 +1970,6 @@ def test_apply_parameters_empty_call(): client.apply_parameters() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ApplyParametersRequest() @@ -1998,7 +1978,7 @@ async def test_apply_parameters_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.ApplyParametersRequest ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2011,13 +1991,11 @@ async def test_apply_parameters_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.apply_parameters(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ApplyParametersRequest() # Establish that the response is the type that we expect. @@ -2030,17 +2008,17 @@ async def test_apply_parameters_async_from_dict(): def test_apply_parameters_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ApplyParametersRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.apply_parameters(request) # Establish that the underlying gRPC stub method was called. 
@@ -2055,11 +2033,14 @@ def test_apply_parameters_field_headers(): @pytest.mark.asyncio async def test_apply_parameters_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ApplyParametersRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2067,7 +2048,6 @@ async def test_apply_parameters_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.apply_parameters(request) # Establish that the underlying gRPC stub method was called. @@ -2081,13 +2061,12 @@ async def test_apply_parameters_field_headers_async(): def test_apply_parameters_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.apply_parameters( @@ -2098,16 +2077,13 @@ def test_apply_parameters_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].node_ids == ["node_ids_value"] - assert args[0].apply_all == True def test_apply_parameters_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2122,7 +2098,9 @@ def test_apply_parameters_flattened_error(): @pytest.mark.asyncio async def test_apply_parameters_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: @@ -2142,17 +2120,16 @@ async def test_apply_parameters_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].node_ids == ["node_ids_value"] - assert args[0].apply_all == True @pytest.mark.asyncio async def test_apply_parameters_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2168,16 +2145,16 @@ async def test_apply_parameters_flattened_error_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.CloudMemcacheGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.CloudMemcacheGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = CloudMemcacheClient( @@ -2187,7 +2164,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.CloudMemcacheGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = CloudMemcacheClient( @@ -2198,7 +2175,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.CloudMemcacheGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = CloudMemcacheClient(transport=transport) assert client.transport is transport @@ -2207,13 +2184,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.CloudMemcacheGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.CloudMemcacheGrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -2228,23 +2205,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.CloudMemcacheGrpcTransport,) def test_cloud_memcache_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.CloudMemcacheTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -2256,7 +2233,7 @@ def test_cloud_memcache_base_transport(): ) as Transport: Transport.return_value = None transport = transports.CloudMemcacheTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -2280,15 +2257,37 @@ def test_cloud_memcache_base_transport(): 
transport.operations_client +@requires_google_auth_gte_1_25_0 def test_cloud_memcache_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.memcache_v1.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudMemcacheTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_cloud_memcache_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.memcache_v1.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.CloudMemcacheTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -2301,19 +2300,33 @@ def test_cloud_memcache_base_transport_with_credentials_file(): def test_cloud_memcache_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.memcache_v1.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.CloudMemcacheTransport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_cloud_memcache_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CloudMemcacheClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_cloud_memcache_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) CloudMemcacheClient() adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), @@ -2321,20 +2334,156 @@ def test_cloud_memcache_auth_adc(): ) -def test_cloud_memcache_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudMemcacheGrpcTransport, + transports.CloudMemcacheGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_cloud_memcache_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.CloudMemcacheGrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudMemcacheGrpcTransport, + transports.CloudMemcacheGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_cloud_memcache_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CloudMemcacheGrpcTransport, grpc_helpers), + (transports.CloudMemcacheGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_cloud_memcache_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "memcache.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="memcache.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CloudMemcacheGrpcTransport, grpc_helpers), + (transports.CloudMemcacheGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_cloud_memcache_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "memcache.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CloudMemcacheGrpcTransport, grpc_helpers), + (transports.CloudMemcacheGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_cloud_memcache_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "memcache.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "transport_class", [ @@ -2343,7 +2492,7 @@ def test_cloud_memcache_transport_auth_adc(): ], ) def test_cloud_memcache_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -2382,7 +2531,7 @@ def test_cloud_memcache_grpc_transport_client_cert_source_for_mtls(transport_cla def test_cloud_memcache_host_no_port(): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="memcache.googleapis.com" ), @@ -2392,7 +2541,7 @@ def test_cloud_memcache_host_no_port(): def test_cloud_memcache_host_with_port(): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="memcache.googleapis.com:8000" ), @@ -2446,9 +2595,9 @@ def test_cloud_memcache_transport_channel_mtls_with_client_cert_source(transport mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = 
ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -2524,7 +2673,7 @@ def test_cloud_memcache_transport_channel_mtls_with_adc(transport_class): def test_cloud_memcache_grpc_lro_client(): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) transport = client.transport @@ -2537,7 +2686,7 @@ def test_cloud_memcache_grpc_lro_client(): def test_cloud_memcache_grpc_lro_async_client(): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client.transport @@ -2552,7 +2701,6 @@ def test_instance_path(): project = "squid" location = "clam" instance = "whelk" - expected = "projects/{project}/locations/{location}/instances/{instance}".format( project=project, location=location, instance=instance, ) @@ -2575,7 +2723,6 @@ def test_parse_instance_path(): def test_common_billing_account_path(): billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -2596,7 +2743,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "winkle" - expected = "folders/{folder}".format(folder=folder,) actual = CloudMemcacheClient.common_folder_path(folder) assert expected == actual @@ -2615,7 +2761,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "scallop" - expected = "organizations/{organization}".format(organization=organization,) actual = CloudMemcacheClient.common_organization_path(organization) assert expected == actual @@ -2634,7 +2779,6 @@ def 
test_parse_common_organization_path(): def test_common_project_path(): project = "squid" - expected = "projects/{project}".format(project=project,) actual = CloudMemcacheClient.common_project_path(project) assert expected == actual @@ -2654,7 +2798,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "whelk" location = "octopus" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -2681,7 +2824,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.CloudMemcacheTransport, "_prep_wrapped_messages" ) as prep: client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2690,6 +2833,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = CloudMemcacheClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/memcache_v1beta2/__init__.py b/tests/unit/gapic/memcache_v1beta2/__init__.py index 42ffdf2..4de6597 100644 --- a/tests/unit/gapic/memcache_v1beta2/__init__.py +++ b/tests/unit/gapic/memcache_v1beta2/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index 1e242ab..b991eec 100644 --- a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ 
-14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,16 +23,16 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import future from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.memcache_v1beta2.services.cloud_memcache import ( CloudMemcacheAsyncClient, @@ -41,11 +40,41 @@ from google.cloud.memcache_v1beta2.services.cloud_memcache import CloudMemcacheClient from google.cloud.memcache_v1beta2.services.cloud_memcache import pagers from google.cloud.memcache_v1beta2.services.cloud_memcache import transports +from google.cloud.memcache_v1beta2.services.cloud_memcache.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.memcache_v1beta2.services.cloud_memcache.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.memcache_v1beta2.types import cloud_memcache from google.longrunning import operations_pb2 from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and 
auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -96,7 +125,7 @@ def test__get_default_mtls_endpoint(): "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] ) def test_cloud_memcache_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -113,7 +142,7 @@ def test_cloud_memcache_client_from_service_account_info(client_class): "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] ) def test_cloud_memcache_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -166,7 +195,7 @@ def test_cloud_memcache_client_client_options( ): # Check that if channel is provided we won't create a new one. 
with mock.patch.object(CloudMemcacheClient, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -454,7 +483,7 @@ def test_list_instances( transport: str = "grpc", request_type=cloud_memcache.ListInstancesRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -467,21 +496,16 @@ def test_list_instances( call.return_value = cloud_memcache.ListInstancesResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) - response = client.list_instances(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ListInstancesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @@ -493,7 +517,7 @@ def test_list_instances_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -501,7 +525,6 @@ def test_list_instances_empty_call(): client.list_instances() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ListInstancesRequest() @@ -510,7 +533,7 @@ async def test_list_instances_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.ListInstancesRequest ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -526,20 +549,16 @@ async def test_list_instances_async( unreachable=["unreachable_value"], ) ) - response = await client.list_instances(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ListInstancesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListInstancesAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @@ -549,17 +568,17 @@ async def test_list_instances_async_from_dict(): def test_list_instances_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ListInstancesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: call.return_value = cloud_memcache.ListInstancesResponse() - client.list_instances(request) # Establish that the underlying gRPC stub method was called. 
@@ -574,11 +593,14 @@ def test_list_instances_field_headers(): @pytest.mark.asyncio async def test_list_instances_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ListInstancesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -586,7 +608,6 @@ async def test_list_instances_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_memcache.ListInstancesResponse() ) - await client.list_instances(request) # Establish that the underlying gRPC stub method was called. @@ -600,13 +621,12 @@ async def test_list_instances_field_headers_async(): def test_list_instances_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_memcache.ListInstancesResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_instances(parent="parent_value",) @@ -615,12 +635,11 @@ def test_list_instances_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_instances_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -632,7 +651,9 @@ def test_list_instances_flattened_error(): @pytest.mark.asyncio async def test_list_instances_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -650,13 +671,14 @@ async def test_list_instances_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_instances_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -667,7 +689,7 @@ async def test_list_instances_flattened_error_async(): def test_list_instances_pager(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials,) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -705,7 +727,7 @@ def test_list_instances_pager(): def test_list_instances_pages(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials,) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -735,7 +757,7 @@ def test_list_instances_pages(): @pytest.mark.asyncio async def test_list_instances_async_pager(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials,) + client = CloudMemcacheAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -772,7 +794,7 @@ async def test_list_instances_async_pager(): @pytest.mark.asyncio async def test_list_instances_async_pages(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials,) + client = CloudMemcacheAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -808,7 +830,7 @@ def test_get_instance( transport: str = "grpc", request_type=cloud_memcache.GetInstanceRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -830,37 +852,24 @@ def test_get_instance( discovery_endpoint="discovery_endpoint_value", update_available=True, ) - response = client.get_instance(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.GetInstanceRequest() # Establish that the response is the type that we expect. - assert isinstance(response, cloud_memcache.Instance) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.authorized_network == "authorized_network_value" - assert response.zones == ["zones_value"] - assert response.node_count == 1070 - assert response.memcache_version == cloud_memcache.MemcacheVersion.MEMCACHE_1_5 - assert response.state == cloud_memcache.Instance.State.CREATING - assert response.memcache_full_version == "memcache_full_version_value" - assert response.discovery_endpoint == "discovery_endpoint_value" - assert response.update_available is True @@ -872,7 +881,7 @@ def test_get_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -880,7 +889,6 @@ def test_get_instance_empty_call(): client.get_instance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.GetInstanceRequest() @@ -889,7 +897,7 @@ async def test_get_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.GetInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -913,36 +921,24 @@ async def test_get_instance_async( update_available=True, ) ) - response = await client.get_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.GetInstanceRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloud_memcache.Instance) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.authorized_network == "authorized_network_value" - assert response.zones == ["zones_value"] - assert response.node_count == 1070 - assert response.memcache_version == cloud_memcache.MemcacheVersion.MEMCACHE_1_5 - assert response.state == cloud_memcache.Instance.State.CREATING - assert response.memcache_full_version == "memcache_full_version_value" - assert response.discovery_endpoint == "discovery_endpoint_value" - assert response.update_available is True @@ -952,17 +948,17 @@ async def test_get_instance_async_from_dict(): def test_get_instance_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = cloud_memcache.GetInstanceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_instance), "__call__") as call: call.return_value = cloud_memcache.Instance() - client.get_instance(request) # Establish that the underlying gRPC stub method was called. @@ -977,11 +973,14 @@ def test_get_instance_field_headers(): @pytest.mark.asyncio async def test_get_instance_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.GetInstanceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -989,7 +988,6 @@ async def test_get_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_memcache.Instance() ) - await client.get_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1003,13 +1001,12 @@ async def test_get_instance_field_headers_async(): def test_get_instance_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_memcache.Instance() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_instance(name="name_value",) @@ -1018,12 +1015,11 @@ def test_get_instance_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_instance_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1035,7 +1031,9 @@ def test_get_instance_flattened_error(): @pytest.mark.asyncio async def test_get_instance_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_instance), "__call__") as call: @@ -1053,13 +1051,14 @@ async def test_get_instance_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1073,7 +1072,7 @@ def test_create_instance( transport: str = "grpc", request_type=cloud_memcache.CreateInstanceRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1084,13 +1083,11 @@ def test_create_instance( with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.CreateInstanceRequest() # Establish that the response is the type that we expect. @@ -1105,7 +1102,7 @@ def test_create_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1113,7 +1110,6 @@ def test_create_instance_empty_call(): client.create_instance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.CreateInstanceRequest() @@ -1122,7 +1118,7 @@ async def test_create_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.CreateInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1135,13 +1131,11 @@ async def test_create_instance_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.create_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.CreateInstanceRequest() # Establish that the response is the type that we expect. 
@@ -1154,17 +1148,17 @@ async def test_create_instance_async_from_dict(): def test_create_instance_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.CreateInstanceRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.create_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1179,11 +1173,14 @@ def test_create_instance_field_headers(): @pytest.mark.asyncio async def test_create_instance_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.CreateInstanceRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1191,7 +1188,6 @@ async def test_create_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.create_instance(request) # Establish that the underlying gRPC stub method was called. 
@@ -1205,13 +1201,12 @@ async def test_create_instance_field_headers_async(): def test_create_instance_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_instance( @@ -1224,16 +1219,13 @@ def test_create_instance_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].instance_id == "instance_id_value" - assert args[0].resource == cloud_memcache.Instance(name="name_value") def test_create_instance_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1248,7 +1240,9 @@ def test_create_instance_flattened_error(): @pytest.mark.asyncio async def test_create_instance_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_instance), "__call__") as call: @@ -1270,17 +1264,16 @@ async def test_create_instance_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].instance_id == "instance_id_value" - assert args[0].resource == cloud_memcache.Instance(name="name_value") @pytest.mark.asyncio async def test_create_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1297,7 +1290,7 @@ def test_update_instance( transport: str = "grpc", request_type=cloud_memcache.UpdateInstanceRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1308,13 +1301,11 @@ def test_update_instance( with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateInstanceRequest() # Establish that the response is the type that we expect. @@ -1329,7 +1320,7 @@ def test_update_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1337,7 +1328,6 @@ def test_update_instance_empty_call(): client.update_instance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateInstanceRequest() @@ -1346,7 +1336,7 @@ async def test_update_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1359,13 +1349,11 @@ async def test_update_instance_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.update_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateInstanceRequest() # Establish that the response is the type that we expect. @@ -1378,17 +1366,17 @@ async def test_update_instance_async_from_dict(): def test_update_instance_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateInstanceRequest() + request.resource.name = "resource.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.update_instance(request) # Establish that the underlying gRPC stub method was called. 
@@ -1405,11 +1393,14 @@ def test_update_instance_field_headers(): @pytest.mark.asyncio async def test_update_instance_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateInstanceRequest() + request.resource.name = "resource.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1417,7 +1408,6 @@ async def test_update_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.update_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1433,17 +1423,16 @@ async def test_update_instance_field_headers_async(): def test_update_instance_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_instance( - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), resource=cloud_memcache.Instance(name="name_value"), ) @@ -1451,28 +1440,28 @@ def test_update_instance_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) - + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) assert args[0].resource == cloud_memcache.Instance(name="name_value") def test_update_instance_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.update_instance( cloud_memcache.UpdateInstanceRequest(), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), resource=cloud_memcache.Instance(name="name_value"), ) @pytest.mark.asyncio async def test_update_instance_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_instance), "__call__") as call: @@ -1485,7 +1474,7 @@ async def test_update_instance_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_instance( - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), resource=cloud_memcache.Instance(name="name_value"), ) @@ -1493,22 +1482,22 @@ async def test_update_instance_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) - + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) assert args[0].resource == cloud_memcache.Instance(name="name_value") @pytest.mark.asyncio async def test_update_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.update_instance( cloud_memcache.UpdateInstanceRequest(), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), resource=cloud_memcache.Instance(name="name_value"), ) @@ -1517,7 +1506,7 @@ def test_update_parameters( transport: str = "grpc", request_type=cloud_memcache.UpdateParametersRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1530,13 +1519,11 @@ def test_update_parameters( ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_parameters(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateParametersRequest() # Establish that the response is the type that we expect. @@ -1551,7 +1538,7 @@ def test_update_parameters_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1561,7 +1548,6 @@ def test_update_parameters_empty_call(): client.update_parameters() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateParametersRequest() @@ -1570,7 +1556,7 @@ async def test_update_parameters_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateParametersRequest ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1585,13 +1571,11 @@ async def test_update_parameters_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.update_parameters(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateParametersRequest() # Establish that the response is the type that we expect. @@ -1604,11 +1588,12 @@ async def test_update_parameters_async_from_dict(): def test_update_parameters_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateParametersRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1616,7 +1601,6 @@ def test_update_parameters_field_headers(): type(client.transport.update_parameters), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.update_parameters(request) # Establish that the underlying gRPC stub method was called. @@ -1631,11 +1615,14 @@ def test_update_parameters_field_headers(): @pytest.mark.asyncio async def test_update_parameters_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateParametersRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1645,7 +1632,6 @@ async def test_update_parameters_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.update_parameters(request) # Establish that the underlying gRPC stub method was called. @@ -1659,7 +1645,7 @@ async def test_update_parameters_field_headers_async(): def test_update_parameters_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1667,12 +1653,11 @@ def test_update_parameters_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.update_parameters( name="name_value", - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), parameters=cloud_memcache.MemcacheParameters(id="id_value"), ) @@ -1680,16 +1665,13 @@ def test_update_parameters_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) - + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) assert args[0].parameters == cloud_memcache.MemcacheParameters(id="id_value") def test_update_parameters_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1697,14 +1679,16 @@ def test_update_parameters_flattened_error(): client.update_parameters( cloud_memcache.UpdateParametersRequest(), name="name_value", - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), parameters=cloud_memcache.MemcacheParameters(id="id_value"), ) @pytest.mark.asyncio async def test_update_parameters_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1720,7 +1704,7 @@ async def test_update_parameters_flattened_async(): # using the keyword arguments to the method. 
response = await client.update_parameters( name="name_value", - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), parameters=cloud_memcache.MemcacheParameters(id="id_value"), ) @@ -1728,17 +1712,16 @@ async def test_update_parameters_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) - + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) assert args[0].parameters == cloud_memcache.MemcacheParameters(id="id_value") @pytest.mark.asyncio async def test_update_parameters_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1746,7 +1729,7 @@ async def test_update_parameters_flattened_error_async(): await client.update_parameters( cloud_memcache.UpdateParametersRequest(), name="name_value", - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), parameters=cloud_memcache.MemcacheParameters(id="id_value"), ) @@ -1755,7 +1738,7 @@ def test_delete_instance( transport: str = "grpc", request_type=cloud_memcache.DeleteInstanceRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1766,13 +1749,11 @@ def test_delete_instance( with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.DeleteInstanceRequest() # Establish that the response is the type that we expect. @@ -1787,7 +1768,7 @@ def test_delete_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1795,7 +1776,6 @@ def test_delete_instance_empty_call(): client.delete_instance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.DeleteInstanceRequest() @@ -1804,7 +1784,7 @@ async def test_delete_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.DeleteInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1817,13 +1797,11 @@ async def test_delete_instance_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.DeleteInstanceRequest() # Establish that the response is the type that we expect. 
@@ -1836,17 +1814,17 @@ async def test_delete_instance_async_from_dict(): def test_delete_instance_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.DeleteInstanceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1861,11 +1839,14 @@ def test_delete_instance_field_headers(): @pytest.mark.asyncio async def test_delete_instance_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.DeleteInstanceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1873,7 +1854,6 @@ async def test_delete_instance_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1887,13 +1867,12 @@ async def test_delete_instance_field_headers_async(): def test_delete_instance_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_instance(name="name_value",) @@ -1902,12 +1881,11 @@ def test_delete_instance_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_instance_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1919,7 +1897,9 @@ def test_delete_instance_flattened_error(): @pytest.mark.asyncio async def test_delete_instance_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: @@ -1937,13 +1917,14 @@ async def test_delete_instance_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1957,7 +1938,7 @@ def test_apply_parameters( transport: str = "grpc", request_type=cloud_memcache.ApplyParametersRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1968,13 +1949,11 @@ def test_apply_parameters( with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.apply_parameters(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ApplyParametersRequest() # Establish that the response is the type that we expect. @@ -1989,7 +1968,7 @@ def test_apply_parameters_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1997,7 +1976,6 @@ def test_apply_parameters_empty_call(): client.apply_parameters() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ApplyParametersRequest() @@ -2006,7 +1984,7 @@ async def test_apply_parameters_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.ApplyParametersRequest ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2019,13 +1997,11 @@ async def test_apply_parameters_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.apply_parameters(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ApplyParametersRequest() # Establish that the response is the type that we expect. @@ -2038,17 +2014,17 @@ async def test_apply_parameters_async_from_dict(): def test_apply_parameters_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ApplyParametersRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.apply_parameters(request) # Establish that the underlying gRPC stub method was called. 
@@ -2063,11 +2039,14 @@ def test_apply_parameters_field_headers(): @pytest.mark.asyncio async def test_apply_parameters_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ApplyParametersRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2075,7 +2054,6 @@ async def test_apply_parameters_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.apply_parameters(request) # Establish that the underlying gRPC stub method was called. @@ -2089,13 +2067,12 @@ async def test_apply_parameters_field_headers_async(): def test_apply_parameters_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.apply_parameters( @@ -2106,16 +2083,13 @@ def test_apply_parameters_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].node_ids == ["node_ids_value"] - assert args[0].apply_all == True def test_apply_parameters_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2130,7 +2104,9 @@ def test_apply_parameters_flattened_error(): @pytest.mark.asyncio async def test_apply_parameters_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: @@ -2150,17 +2126,16 @@ async def test_apply_parameters_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].node_ids == ["node_ids_value"] - assert args[0].apply_all == True @pytest.mark.asyncio async def test_apply_parameters_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -2177,7 +2152,7 @@ def test_apply_software_update( transport: str = "grpc", request_type=cloud_memcache.ApplySoftwareUpdateRequest ): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2190,13 +2165,11 @@ def test_apply_software_update( ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.apply_software_update(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ApplySoftwareUpdateRequest() # Establish that the response is the type that we expect. @@ -2211,7 +2184,7 @@ def test_apply_software_update_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2221,7 +2194,6 @@ def test_apply_software_update_empty_call(): client.apply_software_update() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ApplySoftwareUpdateRequest() @@ -2231,7 +2203,7 @@ async def test_apply_software_update_async( request_type=cloud_memcache.ApplySoftwareUpdateRequest, ): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2246,13 +2218,11 @@ async def test_apply_software_update_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.apply_software_update(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ApplySoftwareUpdateRequest() # Establish that the response is the type that we expect. @@ -2265,11 +2235,12 @@ async def test_apply_software_update_async_from_dict(): def test_apply_software_update_field_headers(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ApplySoftwareUpdateRequest() + request.instance = "instance/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2277,7 +2248,6 @@ def test_apply_software_update_field_headers(): type(client.transport.apply_software_update), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.apply_software_update(request) # Establish that the underlying gRPC stub method was called. 
@@ -2292,11 +2262,14 @@ def test_apply_software_update_field_headers(): @pytest.mark.asyncio async def test_apply_software_update_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_memcache.ApplySoftwareUpdateRequest() + request.instance = "instance/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2306,7 +2279,6 @@ async def test_apply_software_update_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.apply_software_update(request) # Establish that the underlying gRPC stub method was called. @@ -2320,7 +2292,7 @@ async def test_apply_software_update_field_headers_async(): def test_apply_software_update_flattened(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2328,7 +2300,6 @@ def test_apply_software_update_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.apply_software_update( @@ -2339,16 +2310,13 @@ def test_apply_software_update_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].instance == "instance_value" - assert args[0].node_ids == ["node_ids_value"] - assert args[0].apply_all == True def test_apply_software_update_flattened_error(): - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2363,7 +2331,9 @@ def test_apply_software_update_flattened_error(): @pytest.mark.asyncio async def test_apply_software_update_flattened_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2385,17 +2355,16 @@ async def test_apply_software_update_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].instance == "instance_value" - assert args[0].node_ids == ["node_ids_value"] - assert args[0].apply_all == True @pytest.mark.asyncio async def test_apply_software_update_flattened_error_async(): - client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2411,16 +2380,16 @@ async def test_apply_software_update_flattened_error_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.CloudMemcacheGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.CloudMemcacheGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = CloudMemcacheClient( @@ -2430,7 +2399,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.CloudMemcacheGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = CloudMemcacheClient( @@ -2441,7 +2410,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.CloudMemcacheGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = CloudMemcacheClient(transport=transport) assert client.transport is transport @@ -2450,13 +2419,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.CloudMemcacheGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.CloudMemcacheGrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -2471,23 +2440,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.CloudMemcacheGrpcTransport,) def test_cloud_memcache_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.CloudMemcacheTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -2499,7 +2468,7 @@ def test_cloud_memcache_base_transport(): ) as Transport: Transport.return_value = None transport = transports.CloudMemcacheTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -2524,15 +2493,37 @@ def test_cloud_memcache_base_transport(): 
transport.operations_client +@requires_google_auth_gte_1_25_0 def test_cloud_memcache_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudMemcacheTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_cloud_memcache_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.CloudMemcacheTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -2545,19 +2536,33 @@ def test_cloud_memcache_base_transport_with_credentials_file(): def test_cloud_memcache_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.CloudMemcacheTransport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_cloud_memcache_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CloudMemcacheClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_cloud_memcache_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) CloudMemcacheClient() adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), @@ -2565,20 +2570,156 @@ def test_cloud_memcache_auth_adc(): ) -def test_cloud_memcache_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudMemcacheGrpcTransport, + transports.CloudMemcacheGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_cloud_memcache_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.CloudMemcacheGrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudMemcacheGrpcTransport, + transports.CloudMemcacheGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_cloud_memcache_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CloudMemcacheGrpcTransport, grpc_helpers), + (transports.CloudMemcacheGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_cloud_memcache_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "memcache.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="memcache.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CloudMemcacheGrpcTransport, grpc_helpers), + (transports.CloudMemcacheGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_cloud_memcache_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "memcache.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CloudMemcacheGrpcTransport, grpc_helpers), + (transports.CloudMemcacheGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_cloud_memcache_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "memcache.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "transport_class", [ @@ -2587,7 +2728,7 @@ def test_cloud_memcache_transport_auth_adc(): ], ) def test_cloud_memcache_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -2626,7 +2767,7 @@ def test_cloud_memcache_grpc_transport_client_cert_source_for_mtls(transport_cla def test_cloud_memcache_host_no_port(): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="memcache.googleapis.com" ), @@ -2636,7 +2777,7 @@ def test_cloud_memcache_host_no_port(): def test_cloud_memcache_host_with_port(): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="memcache.googleapis.com:8000" ), @@ -2690,9 +2831,9 @@ def test_cloud_memcache_transport_channel_mtls_with_client_cert_source(transport mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = 
ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -2768,7 +2909,7 @@ def test_cloud_memcache_transport_channel_mtls_with_adc(transport_class): def test_cloud_memcache_grpc_lro_client(): client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) transport = client.transport @@ -2781,7 +2922,7 @@ def test_cloud_memcache_grpc_lro_client(): def test_cloud_memcache_grpc_lro_async_client(): client = CloudMemcacheAsyncClient( - credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client.transport @@ -2796,7 +2937,6 @@ def test_instance_path(): project = "squid" location = "clam" instance = "whelk" - expected = "projects/{project}/locations/{location}/instances/{instance}".format( project=project, location=location, instance=instance, ) @@ -2819,7 +2959,6 @@ def test_parse_instance_path(): def test_common_billing_account_path(): billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -2840,7 +2979,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "winkle" - expected = "folders/{folder}".format(folder=folder,) actual = CloudMemcacheClient.common_folder_path(folder) assert expected == actual @@ -2859,7 +2997,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "scallop" - expected = "organizations/{organization}".format(organization=organization,) actual = CloudMemcacheClient.common_organization_path(organization) assert expected == actual @@ -2878,7 +3015,6 @@ def 
test_parse_common_organization_path(): def test_common_project_path(): project = "squid" - expected = "projects/{project}".format(project=project,) actual = CloudMemcacheClient.common_project_path(project) assert expected == actual @@ -2898,7 +3034,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "whelk" location = "octopus" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -2925,7 +3060,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.CloudMemcacheTransport, "_prep_wrapped_messages" ) as prep: client = CloudMemcacheClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2934,6 +3069,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = CloudMemcacheClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) From e7f03bb915eb523afcb72ec0d2dd275739f485e5 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 30 Jun 2021 17:16:45 -0600 Subject: [PATCH 034/159] feat: add always_use_jwt_access (#79) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: use gapic-generator-python 0.50.3 Committer: @busunkim96 PiperOrigin-RevId: 382142900 Source-Link: https://github.com/googleapis/googleapis/commit/513440fda515f3c799c22a30e3906dcda325004e Source-Link: https://github.com/googleapis/googleapis-gen/commit/7b1e2c31233f79a704ec21ca410bf661d6bc68d0 * 🩉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/master/packages/owl-bot/README.md * fix: require google-api-core >= 1.26.0 
Co-authored-by: Owl Bot Co-authored-by: Bu Sun Kim --- .coveragerc | 1 - .../cloud_memcache/transports/base.py | 42 ++---- .../cloud_memcache/transports/grpc.py | 10 +- .../cloud_memcache/transports/grpc_asyncio.py | 10 +- .../cloud_memcache/transports/base.py | 42 ++---- .../cloud_memcache/transports/grpc.py | 10 +- .../cloud_memcache/transports/grpc_asyncio.py | 10 +- setup.py | 2 +- testing/constraints-3.6.txt | 2 +- .../gapic/memcache_v1/test_cloud_memcache.py | 128 +++++------------- .../memcache_v1beta2/test_cloud_memcache.py | 128 +++++------------- 11 files changed, 132 insertions(+), 253 deletions(-) diff --git a/.coveragerc b/.coveragerc index b896f3d..028e2ca 100644 --- a/.coveragerc +++ b/.coveragerc @@ -2,7 +2,6 @@ branch = True [report] -fail_under = 100 show_missing = True omit = google/cloud/memcache/__init__.py diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py index 393f376..041aa53 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py @@ -25,6 +25,7 @@ from google.api_core import retry as retries # type: ignore from google.api_core import operations_v1 # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore from google.cloud.memcache_v1.types import cloud_memcache from google.longrunning import operations_pb2 # type: ignore @@ -45,8 +46,6 @@ except pkg_resources.DistributionNotFound: # pragma: NO COVER _GOOGLE_AUTH_VERSION = None -_API_CORE_VERSION = google.api_core.__version__ - class CloudMemcacheTransport(abc.ABC): """Abstract transport class for CloudMemcache.""" @@ -64,6 +63,7 @@ def __init__( scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + 
always_use_jwt_access: Optional[bool] = False, **kwargs, ) -> None: """Instantiate the transport. @@ -87,6 +87,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: @@ -96,7 +98,7 @@ def __init__( scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) # Save the scopes. - self._scopes = scopes or self.AUTH_SCOPES + self._scopes = scopes # If no credentials are provided, then determine the appropriate # defaults. @@ -115,13 +117,20 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) + # If the credentials is service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + # Save the credentials. self._credentials = credentials - # TODO(busunkim): These two class methods are in the base transport + # TODO(busunkim): This method is in the base transport # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-api-core - # and google-auth are increased. + # should be deleted once the minimum required versions of google-auth is increased. 
# TODO: Remove this function once google-auth >= 1.25.0 is required @classmethod @@ -142,27 +151,6 @@ def _get_scopes_kwargs( return scopes_kwargs - # TODO: Remove this function once google-api-core >= 1.26.0 is required - @classmethod - def _get_self_signed_jwt_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Union[Optional[Sequence[str]], str]]: - """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" - - self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} - - if _API_CORE_VERSION and ( - packaging.version.parse(_API_CORE_VERSION) - >= packaging.version.parse("1.26.0") - ): - self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES - self_signed_jwt_kwargs["scopes"] = scopes - self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST - else: - self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES - - return self_signed_jwt_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py index 5467640..c4c4827 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py @@ -77,6 +77,7 @@ def __init__( client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. @@ -117,6 +118,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -170,6 +173,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: @@ -225,14 +229,14 @@ def create_channel( and ``credentials_file`` are passed. """ - self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) - return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, quota_project_id=quota_project_id, - **self_signed_jwt_kwargs, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, **kwargs, ) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py index b21cfd3..8e1ad32 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py @@ -98,14 +98,14 @@ def create_channel( aio.Channel: A gRPC AsyncIO channel object. """ - self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) - return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, quota_project_id=quota_project_id, - **self_signed_jwt_kwargs, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, **kwargs, ) @@ -123,6 +123,7 @@ def __init__( client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. @@ -164,6 +165,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -216,6 +219,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py index 1fb1292..ad750d6 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py @@ -25,6 +25,7 @@ from google.api_core import retry as retries # type: ignore from google.api_core import operations_v1 # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore from google.cloud.memcache_v1beta2.types import cloud_memcache from google.longrunning import operations_pb2 # type: ignore @@ -45,8 +46,6 @@ except pkg_resources.DistributionNotFound: # pragma: NO COVER _GOOGLE_AUTH_VERSION = None -_API_CORE_VERSION = google.api_core.__version__ - class CloudMemcacheTransport(abc.ABC): """Abstract transport class for CloudMemcache.""" @@ -64,6 +63,7 @@ def __init__( scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, **kwargs, ) -> None: """Instantiate the transport. @@ -87,6 +87,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. """ # Save the hostname. 
Default to port 443 (HTTPS) if none is specified. if ":" not in host: @@ -96,7 +98,7 @@ def __init__( scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) # Save the scopes. - self._scopes = scopes or self.AUTH_SCOPES + self._scopes = scopes # If no credentials are provided, then determine the appropriate # defaults. @@ -115,13 +117,20 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) + # If the credentials is service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + # Save the credentials. self._credentials = credentials - # TODO(busunkim): These two class methods are in the base transport + # TODO(busunkim): This method is in the base transport # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-api-core - # and google-auth are increased. + # should be deleted once the minimum required versions of google-auth is increased. 
# TODO: Remove this function once google-auth >= 1.25.0 is required @classmethod @@ -142,27 +151,6 @@ def _get_scopes_kwargs( return scopes_kwargs - # TODO: Remove this function once google-api-core >= 1.26.0 is required - @classmethod - def _get_self_signed_jwt_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Union[Optional[Sequence[str]], str]]: - """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" - - self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} - - if _API_CORE_VERSION and ( - packaging.version.parse(_API_CORE_VERSION) - >= packaging.version.parse("1.26.0") - ): - self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES - self_signed_jwt_kwargs["scopes"] = scopes - self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST - else: - self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES - - return self_signed_jwt_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py index cf61dee..b92b70e 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py @@ -77,6 +77,7 @@ def __init__( client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. @@ -117,6 +118,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -170,6 +173,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: @@ -225,14 +229,14 @@ def create_channel( and ``credentials_file`` are passed. """ - self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) - return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, quota_project_id=quota_project_id, - **self_signed_jwt_kwargs, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, **kwargs, ) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py index e5a9f15..379650b 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py @@ -98,14 +98,14 @@ def create_channel( aio.Channel: A gRPC AsyncIO channel object. """ - self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) - return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, quota_project_id=quota_project_id, - **self_signed_jwt_kwargs, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, **kwargs, ) @@ -123,6 +123,7 @@ def __init__( client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. @@ -164,6 +165,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -216,6 +219,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: diff --git a/setup.py b/setup.py index 0490fba..610cebc 100644 --- a/setup.py +++ b/setup.py @@ -40,7 +40,7 @@ platforms="Posix; MacOS X; Windows", include_package_data=True, install_requires=( - "google-api-core[grpc] >= 1.22.2, < 2.0.0dev", + "google-api-core[grpc] >= 1.26.0, <2.0.0dev", "proto-plus >= 1.4.0", "packaging >= 14.3", ), diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index 3d61f06..8b9d25b 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -5,7 +5,7 @@ # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.22.2 +google-api-core==1.26.0 proto-plus==1.15.0 packaging==14.3 google-auth==1.24.0 # TODO: remove when google-auth>=1.25.0 is required through google-api-core diff --git a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py index 4970a18..c66c989 100644 --- a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -38,9 +38,6 @@ from google.cloud.memcache_v1.services.cloud_memcache import CloudMemcacheClient from google.cloud.memcache_v1.services.cloud_memcache import pagers from google.cloud.memcache_v1.services.cloud_memcache import transports -from google.cloud.memcache_v1.services.cloud_memcache.transports.base import ( - _API_CORE_VERSION, -) from google.cloud.memcache_v1.services.cloud_memcache.transports.base import ( _GOOGLE_AUTH_VERSION, ) @@ -52,8 +49,9 @@ import google.auth -# TODO(busunkim): Once google-api-core >= 
1.26.0 is required: -# - Delete all the api-core and auth "less than" test cases +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases # - Delete these pytest markers (Make the "greater than or equal to" tests the default). requires_google_auth_lt_1_25_0 = pytest.mark.skipif( packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), @@ -64,16 +62,6 @@ reason="This test requires google-auth >= 1.25.0", ) -requires_api_core_lt_1_26_0 = pytest.mark.skipif( - packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), - reason="This test requires google-api-core < 1.26.0", -) - -requires_api_core_gte_1_26_0 = pytest.mark.skipif( - packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), - reason="This test requires google-api-core >= 1.26.0", -) - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -136,6 +124,36 @@ def test_cloud_memcache_client_from_service_account_info(client_class): assert client.transport._host == "memcache.googleapis.com:443" +@pytest.mark.parametrize( + "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] +) +def test_cloud_memcache_client_service_account_always_use_jwt(client_class): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + client = client_class(credentials=creds) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.CloudMemcacheGrpcTransport, "grpc"), + (transports.CloudMemcacheGrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +def test_cloud_memcache_client_service_account_always_use_jwt_true( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = 
service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + @pytest.mark.parametrize( "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] ) @@ -2382,7 +2400,6 @@ def test_cloud_memcache_transport_auth_adc_old_google_auth(transport_class): (transports.CloudMemcacheGrpcAsyncIOTransport, grpc_helpers_async), ], ) -@requires_api_core_gte_1_26_0 def test_cloud_memcache_transport_create_channel(transport_class, grpc_helpers): # If credentials and host are not provided, the transport class should use # ADC credentials. @@ -2411,79 +2428,6 @@ def test_cloud_memcache_transport_create_channel(transport_class, grpc_helpers): ) -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.CloudMemcacheGrpcTransport, grpc_helpers), - (transports.CloudMemcacheGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -@requires_api_core_lt_1_26_0 -def test_cloud_memcache_transport_create_channel_old_api_core( - transport_class, grpc_helpers -): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus") - - create_channel.assert_called_with( - "memcache.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.CloudMemcacheGrpcTransport, grpc_helpers), - (transports.CloudMemcacheGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -@requires_api_core_lt_1_26_0 -def test_cloud_memcache_transport_create_channel_user_scopes( - transport_class, grpc_helpers -): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "memcache.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - scopes=["1", "2"], - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "transport_class", [ @@ -2506,7 +2450,7 @@ def test_cloud_memcache_grpc_transport_client_cert_source_for_mtls(transport_cla "squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, ssl_credentials=mock_ssl_channel_creds, quota_project_id=None, options=[ @@ -2613,7 +2557,7 @@ def test_cloud_memcache_transport_channel_mtls_with_client_cert_source(transport "mtls.squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ @@ -2660,7 +2604,7 @@ def test_cloud_memcache_transport_channel_mtls_with_adc(transport_class): "mtls.squid.clam.whelk:443", credentials=mock_cred, credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ diff --git a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index b991eec..7b29770 100644 --- a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -40,9 +40,6 @@ from google.cloud.memcache_v1beta2.services.cloud_memcache import CloudMemcacheClient 
from google.cloud.memcache_v1beta2.services.cloud_memcache import pagers from google.cloud.memcache_v1beta2.services.cloud_memcache import transports -from google.cloud.memcache_v1beta2.services.cloud_memcache.transports.base import ( - _API_CORE_VERSION, -) from google.cloud.memcache_v1beta2.services.cloud_memcache.transports.base import ( _GOOGLE_AUTH_VERSION, ) @@ -54,8 +51,9 @@ import google.auth -# TODO(busunkim): Once google-api-core >= 1.26.0 is required: -# - Delete all the api-core and auth "less than" test cases +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases # - Delete these pytest markers (Make the "greater than or equal to" tests the default). requires_google_auth_lt_1_25_0 = pytest.mark.skipif( packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), @@ -66,16 +64,6 @@ reason="This test requires google-auth >= 1.25.0", ) -requires_api_core_lt_1_26_0 = pytest.mark.skipif( - packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), - reason="This test requires google-api-core < 1.26.0", -) - -requires_api_core_gte_1_26_0 = pytest.mark.skipif( - packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), - reason="This test requires google-api-core >= 1.26.0", -) - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -138,6 +126,36 @@ def test_cloud_memcache_client_from_service_account_info(client_class): assert client.transport._host == "memcache.googleapis.com:443" +@pytest.mark.parametrize( + "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] +) +def test_cloud_memcache_client_service_account_always_use_jwt(client_class): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + client = client_class(credentials=creds) + 
use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.CloudMemcacheGrpcTransport, "grpc"), + (transports.CloudMemcacheGrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +def test_cloud_memcache_client_service_account_always_use_jwt_true( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + @pytest.mark.parametrize( "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] ) @@ -2618,7 +2636,6 @@ def test_cloud_memcache_transport_auth_adc_old_google_auth(transport_class): (transports.CloudMemcacheGrpcAsyncIOTransport, grpc_helpers_async), ], ) -@requires_api_core_gte_1_26_0 def test_cloud_memcache_transport_create_channel(transport_class, grpc_helpers): # If credentials and host are not provided, the transport class should use # ADC credentials. @@ -2647,79 +2664,6 @@ def test_cloud_memcache_transport_create_channel(transport_class, grpc_helpers): ) -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.CloudMemcacheGrpcTransport, grpc_helpers), - (transports.CloudMemcacheGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -@requires_api_core_lt_1_26_0 -def test_cloud_memcache_transport_create_channel_old_api_core( - transport_class, grpc_helpers -): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus") - - create_channel.assert_called_with( - "memcache.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.CloudMemcacheGrpcTransport, grpc_helpers), - (transports.CloudMemcacheGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -@requires_api_core_lt_1_26_0 -def test_cloud_memcache_transport_create_channel_user_scopes( - transport_class, grpc_helpers -): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "memcache.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - scopes=["1", "2"], - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "transport_class", [ @@ -2742,7 +2686,7 @@ def test_cloud_memcache_grpc_transport_client_cert_source_for_mtls(transport_cla "squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, ssl_credentials=mock_ssl_channel_creds, quota_project_id=None, options=[ @@ -2849,7 +2793,7 @@ def test_cloud_memcache_transport_channel_mtls_with_client_cert_source(transport "mtls.squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ @@ -2896,7 +2840,7 @@ def test_cloud_memcache_transport_channel_mtls_with_adc(transport_class): "mtls.squid.clam.whelk:443", credentials=mock_cred, credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ From aa5f62b56135407ecfd63663f3d90e33356d3c3a Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 12 Jul 2021 20:08:14 +0000 Subject: [PATCH 035/159] chore: release 1.1.0 (#77) :robot: I have created a release \*beep\* \*boop\* --- ## [1.1.0](https://www.github.com/googleapis/python-memcache/compare/v1.0.0...v1.1.0) 
(2021-06-30) ### Features * add always_use_jwt_access ([#79](https://www.github.com/googleapis/python-memcache/issues/79)) ([e7f03bb](https://www.github.com/googleapis/python-memcache/commit/e7f03bb915eb523afcb72ec0d2dd275739f485e5)) * support self-signed JWT flow for service accounts ([2d1aaf4](https://www.github.com/googleapis/python-memcache/commit/2d1aaf439d096857a727752ae129852b279c3658)) ### Bug Fixes * add async client to %name_%version/init.py ([2d1aaf4](https://www.github.com/googleapis/python-memcache/commit/2d1aaf439d096857a727752ae129852b279c3658)) ### Documentation * omit mention of Python 2.7 in 'CONTRIBUTING.rst' ([#1127](https://www.github.com/googleapis/python-memcache/issues/1127)) ([#70](https://www.github.com/googleapis/python-memcache/issues/70)) ([f273025](https://www.github.com/googleapis/python-memcache/commit/f273025fedad32be0b766e40ab99b445f529cd13)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- CHANGELOG.md | 18 ++++++++++++++++++ setup.py | 2 +- 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index cd1c4f3..230bc71 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,23 @@ # Changelog +## [1.1.0](https://www.github.com/googleapis/python-memcache/compare/v1.0.0...v1.1.0) (2021-06-30) + + +### Features + +* add always_use_jwt_access ([#79](https://www.github.com/googleapis/python-memcache/issues/79)) ([e7f03bb](https://www.github.com/googleapis/python-memcache/commit/e7f03bb915eb523afcb72ec0d2dd275739f485e5)) +* support self-signed JWT flow for service accounts ([2d1aaf4](https://www.github.com/googleapis/python-memcache/commit/2d1aaf439d096857a727752ae129852b279c3658)) + + +### Bug Fixes + +* add async client to %name_%version/init.py ([2d1aaf4](https://www.github.com/googleapis/python-memcache/commit/2d1aaf439d096857a727752ae129852b279c3658)) + + +### Documentation + +* omit mention of Python 2.7 in 'CONTRIBUTING.rst' ([#1127](https://www.github.com/googleapis/python-memcache/issues/1127)) ([#70](https://www.github.com/googleapis/python-memcache/issues/70)) ([f273025](https://www.github.com/googleapis/python-memcache/commit/f273025fedad32be0b766e40ab99b445f529cd13)) + ## [1.0.0](https://www.github.com/googleapis/python-memcache/compare/v0.3.0...v1.0.0) (2021-05-28) diff --git a/setup.py b/setup.py index 610cebc..6c86f7a 100644 --- a/setup.py +++ b/setup.py @@ -19,7 +19,7 @@ import os import setuptools # type: ignore -version = "1.0.0" +version = "1.1.0" package_root = os.path.abspath(os.path.dirname(__file__)) From 2afcab18e02535e6b65f0b9591a6a1f31a812dc5 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 14 Jul 2021 15:22:28 +0000 Subject: [PATCH 036/159] build(python): exit with success status if no samples found (#80) Source-Link: https://github.com/googleapis/synthtool/commit/53ea3896a52f87c758e79b5a19fa338c83925a98 Post-Processor: 
gcr.io/repo-automation-bots/owlbot-python:latest@sha256:e1793a23ae0ee9aafb2e3a53b564a351f74790dbe3c2d75f8fc3b8c43e5c036c --- .github/.OwlBot.lock.yaml | 2 +- .kokoro/test-samples-impl.sh | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index e2b39f9..a5d3697 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:99d90d097e4a4710cc8658ee0b5b963f4426d0e424819787c3ac1405c9a26719 + digest: sha256:e1793a23ae0ee9aafb2e3a53b564a351f74790dbe3c2d75f8fc3b8c43e5c036c diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh index cf5de74..311a8d5 100755 --- a/.kokoro/test-samples-impl.sh +++ b/.kokoro/test-samples-impl.sh @@ -20,9 +20,9 @@ set -eo pipefail # Enables `**` to include files nested inside sub-folders shopt -s globstar -# Exit early if samples directory doesn't exist -if [ ! -d "./samples" ]; then - echo "No tests run. `./samples` not found" +# Exit early if samples don't exist +if ! find samples -name 'requirements.txt' | grep -q .; then + echo "No tests run. 
'./samples/**/requirements.txt' not found" exit 0 fi From 95b0d16200b265740539ae081ab120e0107b5d11 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 14 Jul 2021 17:54:23 +0000 Subject: [PATCH 037/159] build(python): remove python 3.7 from kokoro Dockerfile (#81) Source-Link: https://github.com/googleapis/synthtool/commit/e44dc0c742b1230887a73552357e0c18dcc30b92 Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:5ff7446edeaede81c3ed58b23a4e76a5403fba1350ce28478045657303b6479d --- .github/.OwlBot.lock.yaml | 2 +- .kokoro/docker/docs/Dockerfile | 35 ++------------------- .kokoro/docker/docs/fetch_gpg_keys.sh | 45 --------------------------- 3 files changed, 3 insertions(+), 79 deletions(-) delete mode 100755 .kokoro/docker/docs/fetch_gpg_keys.sh diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index a5d3697..cb06536 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:e1793a23ae0ee9aafb2e3a53b564a351f74790dbe3c2d75f8fc3b8c43e5c036c + digest: sha256:5ff7446edeaede81c3ed58b23a4e76a5403fba1350ce28478045657303b6479d diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile index 412b0b5..4e1b1fb 100644 --- a/.kokoro/docker/docs/Dockerfile +++ b/.kokoro/docker/docs/Dockerfile @@ -40,6 +40,7 @@ RUN apt-get update \ libssl-dev \ libsqlite3-dev \ portaudio19-dev \ + python3-distutils \ redis-server \ software-properties-common \ ssh \ @@ -59,40 +60,8 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb - -COPY fetch_gpg_keys.sh /tmp -# Install the desired versions of Python. 
-RUN set -ex \ - && export GNUPGHOME="$(mktemp -d)" \ - && echo "disable-ipv6" >> "${GNUPGHOME}/dirmngr.conf" \ - && /tmp/fetch_gpg_keys.sh \ - && for PYTHON_VERSION in 3.7.8 3.8.5; do \ - wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz" \ - && wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc" \ - && gpg --batch --verify python-${PYTHON_VERSION}.tar.xz.asc python-${PYTHON_VERSION}.tar.xz \ - && rm -r python-${PYTHON_VERSION}.tar.xz.asc \ - && mkdir -p /usr/src/python-${PYTHON_VERSION} \ - && tar -xJC /usr/src/python-${PYTHON_VERSION} --strip-components=1 -f python-${PYTHON_VERSION}.tar.xz \ - && rm python-${PYTHON_VERSION}.tar.xz \ - && cd /usr/src/python-${PYTHON_VERSION} \ - && ./configure \ - --enable-shared \ - # This works only on Python 2.7 and throws a warning on every other - # version, but seems otherwise harmless. - --enable-unicode=ucs4 \ - --with-system-ffi \ - --without-ensurepip \ - && make -j$(nproc) \ - && make install \ - && ldconfig \ - ; done \ - && rm -rf "${GNUPGHOME}" \ - && rm -rf /usr/src/python* \ - && rm -rf ~/.cache/ - RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3.7 /tmp/get-pip.py \ && python3.8 /tmp/get-pip.py \ && rm /tmp/get-pip.py -CMD ["python3.7"] +CMD ["python3.8"] diff --git a/.kokoro/docker/docs/fetch_gpg_keys.sh b/.kokoro/docker/docs/fetch_gpg_keys.sh deleted file mode 100755 index d653dd8..0000000 --- a/.kokoro/docker/docs/fetch_gpg_keys.sh +++ /dev/null @@ -1,45 +0,0 @@ -#!/bin/bash -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# A script to fetch gpg keys with retry. -# Avoid jinja parsing the file. -# - -function retry { - if [[ "${#}" -le 1 ]]; then - echo "Usage: ${0} retry_count commands.." - exit 1 - fi - local retries=${1} - local command="${@:2}" - until [[ "${retries}" -le 0 ]]; do - $command && return 0 - if [[ $? -ne 0 ]]; then - echo "command failed, retrying" - ((retries--)) - fi - done - return 1 -} - -# 3.6.9, 3.7.5 (Ned Deily) -retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \ - 0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D - -# 3.8.0 (Ɓukasz Langa) -retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \ - E3FF2839C048B25C084DEBE9B26995E310250568 - -# From d4f2c965c13c28f97bda9aa8ab570529747bd68d Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 20 Jul 2021 03:46:43 -0600 Subject: [PATCH 038/159] fix(deps): pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x versions (#82) Expand pins on library dependencies in preparation for these dependencies taking a new major version. See https://github.com/googleapis/google-cloud-python/issues/10566. 
--- setup.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 6c86f7a..e693c80 100644 --- a/setup.py +++ b/setup.py @@ -40,7 +40,10 @@ platforms="Posix; MacOS X; Windows", include_package_data=True, install_requires=( - "google-api-core[grpc] >= 1.26.0, <2.0.0dev", + # NOTE: Maintainers, please do not require google-api-core>=2.x.x + # Until this issue is closed + # https://github.com/googleapis/google-cloud-python/issues/10566 + "google-api-core[grpc] >= 1.26.0, <3.0.0dev", "proto-plus >= 1.4.0", "packaging >= 14.3", ), From 94714851060def4b68ec065ae435b71ce94f41bc Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 22 Jul 2021 14:10:34 +0000 Subject: [PATCH 039/159] feat: add Samples section to CONTRIBUTING.rst (#83) Source-Link: https://github.com/googleapis/synthtool/commit/52e4e46eff2a0b70e3ff5506a02929d089d077d4 Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:6186535cbdbf6b9fe61f00294929221d060634dae4a0795c1cefdbc995b2d605 --- .github/.OwlBot.lock.yaml | 2 +- CONTRIBUTING.rst | 24 ++++++++++++++++++++++++ 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index cb06536..d57f742 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:5ff7446edeaede81c3ed58b23a4e76a5403fba1350ce28478045657303b6479d + digest: sha256:6186535cbdbf6b9fe61f00294929221d060634dae4a0795c1cefdbc995b2d605 diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 9b2a1ca..0cb77b9 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -177,6 +177,30 @@ Build the docs via: $ nox -s docs +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. 
Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + ******************************************** Note About ``README`` as it pertains to PyPI ******************************************** From c1ad4dc69a4638cc1d8774d3076d9f1b28d8642b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 23 Jul 2021 15:33:15 +0000 Subject: [PATCH 040/159] chore: fix kokoro config for samples (#86) Source-Link: https://github.com/googleapis/synthtool/commit/dd05f9d12f134871c9e45282349c9856fbebecdd Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:aea14a583128771ae8aefa364e1652f3c56070168ef31beb203534222d842b8b --- .github/.OwlBot.lock.yaml | 2 +- .kokoro/samples/python3.6/periodic-head.cfg | 2 +- .kokoro/samples/python3.7/periodic-head.cfg | 2 +- .kokoro/samples/python3.8/periodic-head.cfg | 2 +- .kokoro/samples/python3.9/periodic-head.cfg | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index d57f742..9ee60f7 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:6186535cbdbf6b9fe61f00294929221d060634dae4a0795c1cefdbc995b2d605 + digest: sha256:aea14a583128771ae8aefa364e1652f3c56070168ef31beb203534222d842b8b diff --git 
a/.kokoro/samples/python3.6/periodic-head.cfg b/.kokoro/samples/python3.6/periodic-head.cfg index f9cfcd3..aa527a5 100644 --- a/.kokoro/samples/python3.6/periodic-head.cfg +++ b/.kokoro/samples/python3.6/periodic-head.cfg @@ -7,5 +7,5 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" + value: "github/python-memcache/.kokoro/test-samples-against-head.sh" } diff --git a/.kokoro/samples/python3.7/periodic-head.cfg b/.kokoro/samples/python3.7/periodic-head.cfg index f9cfcd3..aa527a5 100644 --- a/.kokoro/samples/python3.7/periodic-head.cfg +++ b/.kokoro/samples/python3.7/periodic-head.cfg @@ -7,5 +7,5 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" + value: "github/python-memcache/.kokoro/test-samples-against-head.sh" } diff --git a/.kokoro/samples/python3.8/periodic-head.cfg b/.kokoro/samples/python3.8/periodic-head.cfg index f9cfcd3..aa527a5 100644 --- a/.kokoro/samples/python3.8/periodic-head.cfg +++ b/.kokoro/samples/python3.8/periodic-head.cfg @@ -7,5 +7,5 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" + value: "github/python-memcache/.kokoro/test-samples-against-head.sh" } diff --git a/.kokoro/samples/python3.9/periodic-head.cfg b/.kokoro/samples/python3.9/periodic-head.cfg index f9cfcd3..aa527a5 100644 --- a/.kokoro/samples/python3.9/periodic-head.cfg +++ b/.kokoro/samples/python3.9/periodic-head.cfg @@ -7,5 +7,5 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" + value: "github/python-memcache/.kokoro/test-samples-against-head.sh" } From 0ddd8eb6c91b799d443e4d09a20adcd25d9ef70a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 24 Jul 2021 10:18:12 +0000 Subject: [PATCH 041/159] fix: enable self 
signed jwt for grpc (#88) PiperOrigin-RevId: 386504689 Source-Link: https://github.com/googleapis/googleapis/commit/762094a99ac6e03a17516b13dfbef37927267a70 Source-Link: https://github.com/googleapis/googleapis-gen/commit/6bfc480e1a161d5de121c2bcc3745885d33b265a --- .../services/cloud_memcache/client.py | 4 +++ .../services/cloud_memcache/client.py | 4 +++ .../gapic/memcache_v1/test_cloud_memcache.py | 31 +++++++++++-------- .../memcache_v1beta2/test_cloud_memcache.py | 31 +++++++++++-------- 4 files changed, 44 insertions(+), 26 deletions(-) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py index 3152eee..ed1cf94 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -366,6 +366,10 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), ) def list_instances( diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index 65a2bc3..08b759f 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -366,6 +366,10 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), ) def list_instances( diff --git a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py index c66c989..849c4d8 100644 --- 
a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -124,18 +124,6 @@ def test_cloud_memcache_client_from_service_account_info(client_class): assert client.transport._host == "memcache.googleapis.com:443" -@pytest.mark.parametrize( - "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] -) -def test_cloud_memcache_client_service_account_always_use_jwt(client_class): - with mock.patch.object( - service_account.Credentials, "with_always_use_jwt_access", create=True - ) as use_jwt: - creds = service_account.Credentials(None, None, None) - client = client_class(credentials=creds) - use_jwt.assert_not_called() - - @pytest.mark.parametrize( "transport_class,transport_name", [ @@ -143,7 +131,7 @@ def test_cloud_memcache_client_service_account_always_use_jwt(client_class): (transports.CloudMemcacheGrpcAsyncIOTransport, "grpc_asyncio"), ], ) -def test_cloud_memcache_client_service_account_always_use_jwt_true( +def test_cloud_memcache_client_service_account_always_use_jwt( transport_class, transport_name ): with mock.patch.object( @@ -153,6 +141,13 @@ def test_cloud_memcache_client_service_account_always_use_jwt_true( transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + @pytest.mark.parametrize( "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] @@ -233,6 +228,7 @@ def test_cloud_memcache_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -249,6 +245,7 @@ def 
test_cloud_memcache_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -265,6 +262,7 @@ def test_cloud_memcache_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -293,6 +291,7 @@ def test_cloud_memcache_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -359,6 +358,7 @@ def test_cloud_memcache_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -392,6 +392,7 @@ def test_cloud_memcache_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -413,6 +414,7 @@ def test_cloud_memcache_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -443,6 +445,7 @@ def test_cloud_memcache_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -473,6 +476,7 @@ def test_cloud_memcache_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -492,6 +496,7 @@ def test_cloud_memcache_client_client_options_from_dict(): client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) diff --git a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index 7b29770..767f2d1 100644 --- a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -126,18 +126,6 @@ def test_cloud_memcache_client_from_service_account_info(client_class): assert client.transport._host == "memcache.googleapis.com:443" -@pytest.mark.parametrize( - "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] -) -def test_cloud_memcache_client_service_account_always_use_jwt(client_class): - with mock.patch.object( - service_account.Credentials, "with_always_use_jwt_access", create=True - ) as use_jwt: - creds = service_account.Credentials(None, None, None) - client = client_class(credentials=creds) - use_jwt.assert_not_called() - - @pytest.mark.parametrize( "transport_class,transport_name", [ @@ -145,7 +133,7 @@ def test_cloud_memcache_client_service_account_always_use_jwt(client_class): (transports.CloudMemcacheGrpcAsyncIOTransport, "grpc_asyncio"), ], ) -def 
test_cloud_memcache_client_service_account_always_use_jwt_true( +def test_cloud_memcache_client_service_account_always_use_jwt( transport_class, transport_name ): with mock.patch.object( @@ -155,6 +143,13 @@ def test_cloud_memcache_client_service_account_always_use_jwt_true( transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + @pytest.mark.parametrize( "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] @@ -235,6 +230,7 @@ def test_cloud_memcache_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -251,6 +247,7 @@ def test_cloud_memcache_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -267,6 +264,7 @@ def test_cloud_memcache_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -295,6 +293,7 @@ def test_cloud_memcache_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -361,6 +360,7 @@ def test_cloud_memcache_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, 
client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -394,6 +394,7 @@ def test_cloud_memcache_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -415,6 +416,7 @@ def test_cloud_memcache_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -445,6 +447,7 @@ def test_cloud_memcache_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -475,6 +478,7 @@ def test_cloud_memcache_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -494,6 +498,7 @@ def test_cloud_memcache_client_client_options_from_dict(): client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) From 31822078c9a27c26f303f51106ccb0af587a35e4 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Mon, 26 Jul 2021 13:36:21 -0600 Subject: [PATCH 042/159] chore: release as 1.1.1 (#87) Corrects release version to be patch instead of minor (to exclude docs change). 
Release-As: 1.1.1 From 65d98bf0e2118887f100643fd9d6cb9e710877b3 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 27 Jul 2021 13:52:10 +0000 Subject: [PATCH 043/159] chore: release 1.1.1 (#89) :robot: I have created a release \*beep\* \*boop\* --- ### [1.1.1](https://www.github.com/googleapis/python-memcache/compare/v1.1.0...v1.1.1) (2021-07-26) ### Bug Fixes * **deps:** pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x versions ([#82](https://www.github.com/googleapis/python-memcache/issues/82)) ([d4f2c96](https://www.github.com/googleapis/python-memcache/commit/d4f2c965c13c28f97bda9aa8ab570529747bd68d)) * enable self signed jwt for grpc ([#88](https://www.github.com/googleapis/python-memcache/issues/88)) ([0ddd8eb](https://www.github.com/googleapis/python-memcache/commit/0ddd8eb6c91b799d443e4d09a20adcd25d9ef70a)) ### Documentation * add Samples section to CONTRIBUTING.rst ([#83](https://www.github.com/googleapis/python-memcache/issues/83)) ([9471485](https://www.github.com/googleapis/python-memcache/commit/94714851060def4b68ec065ae435b71ce94f41bc)) ### Miscellaneous Chores * release as 1.1.1 ([#87](https://www.github.com/googleapis/python-memcache/issues/87)) ([3182207](https://www.github.com/googleapis/python-memcache/commit/31822078c9a27c26f303f51106ccb0af587a35e4)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- CHANGELOG.md | 18 ++++++++++++++++++ setup.py | 2 +- 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 230bc71..6f15b70 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,23 @@ # Changelog +### [1.1.1](https://www.github.com/googleapis/python-memcache/compare/v1.1.0...v1.1.1) (2021-07-26) + + +### Bug Fixes + +* **deps:** pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x versions ([#82](https://www.github.com/googleapis/python-memcache/issues/82)) ([d4f2c96](https://www.github.com/googleapis/python-memcache/commit/d4f2c965c13c28f97bda9aa8ab570529747bd68d)) +* enable self signed jwt for grpc ([#88](https://www.github.com/googleapis/python-memcache/issues/88)) ([0ddd8eb](https://www.github.com/googleapis/python-memcache/commit/0ddd8eb6c91b799d443e4d09a20adcd25d9ef70a)) + + +### Documentation + +* add Samples section to CONTRIBUTING.rst ([#83](https://www.github.com/googleapis/python-memcache/issues/83)) ([9471485](https://www.github.com/googleapis/python-memcache/commit/94714851060def4b68ec065ae435b71ce94f41bc)) + + +### Miscellaneous Chores + +* release as 1.1.1 ([#87](https://www.github.com/googleapis/python-memcache/issues/87)) ([3182207](https://www.github.com/googleapis/python-memcache/commit/31822078c9a27c26f303f51106ccb0af587a35e4)) + ## [1.1.0](https://www.github.com/googleapis/python-memcache/compare/v1.0.0...v1.1.0) (2021-06-30) diff --git a/setup.py b/setup.py index e693c80..c6b3c90 100644 --- a/setup.py +++ b/setup.py @@ -19,7 +19,7 @@ import os import setuptools # type: ignore -version = "1.1.0" +version = "1.1.1" package_root = os.path.abspath(os.path.dirname(__file__)) From 7490e7d5ec89542578126724ecb5e696660bb724 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 12 Aug 2021 15:55:02 +0000 Subject: [PATCH 044/159] chore(python): avoid `.nox` directories when building docs (#91) Source-Link: 
https://github.com/googleapis/synthtool/commit/7e1f6da50524b5d98eb67adbf6dd0805df54233d Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:a1a891041baa4ffbe1a809ac1b8b9b4a71887293c9101c88e8e255943c5aec2d --- .github/.OwlBot.lock.yaml | 2 +- docs/conf.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 9ee60f7..b771c37 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:aea14a583128771ae8aefa364e1652f3c56070168ef31beb203534222d842b8b + digest: sha256:a1a891041baa4ffbe1a809ac1b8b9b4a71887293c9101c88e8e255943c5aec2d diff --git a/docs/conf.py b/docs/conf.py index c592f75..5853df4 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -110,6 +110,7 @@ # directories to ignore when looking for source files. exclude_patterns = [ "_build", + "**/.nox/**/*", "samples/AUTHORING_GUIDE.md", "samples/CONTRIBUTING.md", "samples/snippets/README.rst", From ca75f31bad4d9e35940a2d4b6271217bc58eafdb Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 13 Aug 2021 11:39:10 -0400 Subject: [PATCH 045/159] chore: drop mention of Python 2.7 from templates (#92) Source-Link: https://github.com/googleapis/synthtool/commit/facee4cc1ea096cd8bcc008bb85929daa7c414c0 Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:9743664022bd63a8084be67f144898314c7ca12f0a03e422ac17c733c129d803 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 2 +- noxfile.py | 12 +++++++++--- scripts/readme-gen/templates/install_deps.tmpl.rst | 2 +- 3 files changed, 11 insertions(+), 5 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index b771c37..a9fcd07 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest 
- digest: sha256:a1a891041baa4ffbe1a809ac1b8b9b4a71887293c9101c88e8e255943c5aec2d + digest: sha256:9743664022bd63a8084be67f144898314c7ca12f0a03e422ac17c733c129d803 diff --git a/noxfile.py b/noxfile.py index d896e7f..935a924 100644 --- a/noxfile.py +++ b/noxfile.py @@ -84,9 +84,15 @@ def default(session): constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) - session.install("asyncmock", "pytest-asyncio", "-c", constraints_path) - - session.install("mock", "pytest", "pytest-cov", "-c", constraints_path) + session.install( + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", + "-c", + constraints_path, + ) session.install("-e", ".", "-c", constraints_path) diff --git a/scripts/readme-gen/templates/install_deps.tmpl.rst b/scripts/readme-gen/templates/install_deps.tmpl.rst index a0406db..275d649 100644 --- a/scripts/readme-gen/templates/install_deps.tmpl.rst +++ b/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -12,7 +12,7 @@ Install Dependencies .. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup -#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. +#. Create a virtualenv. Samples are compatible with Python 3.6+. .. 
code-block:: bash From 413dc5eb8b399f2ac66f47b334b0703dd4adc51c Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 18 Aug 2021 07:40:59 -0600 Subject: [PATCH 046/159] chore: generate python samples templates in owlbot.py (#93) Generate python samples templates in owlbot.py --- owlbot.py | 1 + 1 file changed, 1 insertion(+) diff --git a/owlbot.py b/owlbot.py index fb3ae3a..6ae69bc 100644 --- a/owlbot.py +++ b/owlbot.py @@ -37,6 +37,7 @@ # Add templated files # ---------------------------------------------------------------------------- templated_files = common.py_library(cov_level=100, microgenerator=True) +python.py_samples(skip_readmes=True) s.move( templated_files, excludes=[".coveragerc"] ) # the microgenerator has a good coveragerc file From df401543c41c13d2adb7d1d933eba9f882be1016 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 30 Aug 2021 15:38:19 +0000 Subject: [PATCH 047/159] chore(python): disable dependency dashboard (#95) --- .github/.OwlBot.lock.yaml | 2 +- renovate.json | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index a9fcd07..b75186c 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:9743664022bd63a8084be67f144898314c7ca12f0a03e422ac17c733c129d803 + digest: sha256:d6761eec279244e57fe9d21f8343381a01d3632c034811a72f68b83119e58c69 diff --git a/renovate.json b/renovate.json index c048955..9fa8816 100644 --- a/renovate.json +++ b/renovate.json @@ -1,6 +1,8 @@ { "extends": [ - "config:base", ":preserveSemverRanges" + "config:base", + ":preserveSemverRanges", + ":disableDependencyDashboard" ], "ignorePaths": [".pre-commit-config.yaml"], "pip_requirements": { From 327d9ebd21d0e6b563ff44b7daf16f0929abfd0b Mon Sep 17 00:00:00 2001 From: Dan Lee 
<71398022+dandhlee@users.noreply.github.com> Date: Mon, 30 Aug 2021 16:23:30 -0400 Subject: [PATCH 048/159] chore: migrate to main branch (#97) --- .kokoro/build.sh | 2 +- .kokoro/test-samples-impl.sh | 2 +- CONTRIBUTING.rst | 12 ++++++------ owlbot.py | 29 +++++++++++++++++++++++++++++ 4 files changed, 37 insertions(+), 8 deletions(-) diff --git a/.kokoro/build.sh b/.kokoro/build.sh index 03a459c..67faddb 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -41,7 +41,7 @@ python3 -m pip install --upgrade --quiet nox python3 -m nox --version # If this is a continuous build, send the test log to the FlakyBot. -# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. +# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then cleanup() { chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh index 311a8d5..8a324c9 100755 --- a/.kokoro/test-samples-impl.sh +++ b/.kokoro/test-samples-impl.sh @@ -80,7 +80,7 @@ for file in samples/**/requirements.txt; do EXIT=$? # If this is a periodic build, send the test log to the FlakyBot. - # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. + # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot $KOKORO_GFILE_DIR/linux_amd64/flakybot diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 0cb77b9..3c14905 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -50,9 +50,9 @@ You'll have to create a development environment using a Git checkout: # Configure remotes such that you can pull changes from the googleapis/python-memcache # repository into your local repository. 
$ git remote add upstream git@github.com:googleapis/python-memcache.git - # fetch and merge changes from upstream into master + # fetch and merge changes from upstream into main $ git fetch upstream - $ git merge upstream/master + $ git merge upstream/main Now your local repo is set up such that you will push changes to your GitHub repo, from which you can submit a pull request. @@ -110,12 +110,12 @@ Coding Style variables:: export GOOGLE_CLOUD_TESTING_REMOTE="upstream" - export GOOGLE_CLOUD_TESTING_BRANCH="master" + export GOOGLE_CLOUD_TESTING_BRANCH="main" By doing this, you are specifying the location of the most up-to-date version of ``python-memcache``. The the suggested remote name ``upstream`` should point to the official ``googleapis`` checkout and the - the branch should be the main branch on that remote (``master``). + the branch should be the main branch on that remote (``main``). - This repository contains configuration for the `pre-commit `__ tool, which automates checking @@ -209,7 +209,7 @@ The `description on PyPI`_ for the project comes directly from the ``README``. Due to the reStructuredText (``rst``) parser used by PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` instead of -``https://github.com/googleapis/python-memcache/blob/master/CONTRIBUTING.rst``) +``https://github.com/googleapis/python-memcache/blob/main/CONTRIBUTING.rst``) may cause problems creating links or rendering the description. .. _description on PyPI: https://pypi.org/project/google-cloud-memcache @@ -234,7 +234,7 @@ We support: Supported versions can be found in our ``noxfile.py`` `config`_. -.. _config: https://github.com/googleapis/python-memcache/blob/master/noxfile.py +.. _config: https://github.com/googleapis/python-memcache/blob/main/noxfile.py We also explicitly decided to support Python 3 beginning with version 3.6. 
diff --git a/owlbot.py b/owlbot.py index 6ae69bc..c10feed 100644 --- a/owlbot.py +++ b/owlbot.py @@ -43,3 +43,32 @@ ) # the microgenerator has a good coveragerc file s.shell.run(["nox", "-s", "blacken"], hide_output=False) + +# ---------------------------------------------------------------------------- +# Main Branch migration +# ---------------------------------------------------------------------------- + +s.replace( + "*.rst", + "master", + "main" +) + +s.replace( + "*.rst", + "google-cloud-python/blob/main", + "google-cloud-python/blob/master" +) + +s.replace( + "CONTRIBUTING.rst", + "kubernetes/community/blob/main", + "kubernetes/community/blob/master" +) + +s.replace( + ".kokoro/*", + "master", + "main" +) + From ef1e3ae936c49494842771377155274d8e0d4cff Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 1 Sep 2021 15:44:34 +0000 Subject: [PATCH 049/159] chore(python): group renovate prs (#98) --- .github/.OwlBot.lock.yaml | 2 +- renovate.json | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index b75186c..ef3cb34 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:d6761eec279244e57fe9d21f8343381a01d3632c034811a72f68b83119e58c69 + digest: sha256:1456ea2b3b523ccff5e13030acef56d1de28f21249c62aa0f196265880338fa7 diff --git a/renovate.json b/renovate.json index 9fa8816..c21036d 100644 --- a/renovate.json +++ b/renovate.json @@ -1,6 +1,7 @@ { "extends": [ "config:base", + "group:all", ":preserveSemverRanges", ":disableDependencyDashboard" ], From 16380881b5a1d89ccd8a12b1f55327f04a557f6e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 2 Sep 2021 16:51:59 -0600 Subject: [PATCH 050/159] chore(python): rename default branch to main (#99) Source-Link: 
https://github.com/googleapis/synthtool/commit/5c0fa62eea9c33ebe61e582424b659eb264e1ba4 Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:0ffe3bdd6c7159692df5f7744da74e5ef19966288a6bf76023e8e04e0c424d7d Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 2 +- CONTRIBUTING.rst | 6 +++--- docs/conf.py | 10 +++++----- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index ef3cb34..c07f148 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:1456ea2b3b523ccff5e13030acef56d1de28f21249c62aa0f196265880338fa7 + digest: sha256:0ffe3bdd6c7159692df5f7744da74e5ef19966288a6bf76023e8e04e0c424d7d diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 3c14905..7aeedc1 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -113,9 +113,9 @@ Coding Style export GOOGLE_CLOUD_TESTING_BRANCH="main" By doing this, you are specifying the location of the most up-to-date - version of ``python-memcache``. The the suggested remote name ``upstream`` - should point to the official ``googleapis`` checkout and the - the branch should be the main branch on that remote (``main``). + version of ``python-memcache``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). - This repository contains configuration for the `pre-commit `__ tool, which automates checking diff --git a/docs/conf.py b/docs/conf.py index 5853df4..989f4ee 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -76,8 +76,8 @@ # The encoding of source files. # source_encoding = 'utf-8-sig' -# The master toctree document. -master_doc = "index" +# The root toctree document. +root_doc = "index" # General information about the project. 
project = "google-cloud-memcache" @@ -280,7 +280,7 @@ # author, documentclass [howto, manual, or own class]). latex_documents = [ ( - master_doc, + root_doc, "google-cloud-memcache.tex", "google-cloud-memcache Documentation", author, @@ -315,7 +315,7 @@ # (source start file, name, description, authors, manual section). man_pages = [ ( - master_doc, + root_doc, "google-cloud-memcache", "google-cloud-memcache Documentation", [author], @@ -334,7 +334,7 @@ # dir menu entry, description, category) texinfo_documents = [ ( - master_doc, + root_doc, "google-cloud-memcache", "google-cloud-memcache Documentation", author, From ceae130cea122a819a51869bb85a6ca6b857bda0 Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Tue, 7 Sep 2021 11:06:10 -0400 Subject: [PATCH 051/159] chore: revert owlbot main branch templates (#100) Reverts main branch template for OwlBot that were included for the main branch integration. --- owlbot.py | 28 ---------------------------- 1 file changed, 28 deletions(-) diff --git a/owlbot.py b/owlbot.py index c10feed..e32ab90 100644 --- a/owlbot.py +++ b/owlbot.py @@ -44,31 +44,3 @@ s.shell.run(["nox", "-s", "blacken"], hide_output=False) -# ---------------------------------------------------------------------------- -# Main Branch migration -# ---------------------------------------------------------------------------- - -s.replace( - "*.rst", - "master", - "main" -) - -s.replace( - "*.rst", - "google-cloud-python/blob/main", - "google-cloud-python/blob/master" -) - -s.replace( - "CONTRIBUTING.rst", - "kubernetes/community/blob/main", - "kubernetes/community/blob/master" -) - -s.replace( - ".kokoro/*", - "master", - "main" -) - From 67db90b54e5bd4fe9c8792d55115029d9fc204d3 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 7 Sep 2021 11:46:39 -0600 Subject: [PATCH 052/159] chore: reference main branch of google-cloud-python (#101) --- README.rst | 4 ++-- 1 file 
changed, 2 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index 74d0643..f643869 100644 --- a/README.rst +++ b/README.rst @@ -10,7 +10,7 @@ compatible with OSS Memcached protocol. - `Product Documentation`_ .. |GA| image:: https://img.shields.io/badge/support-ga-gold.svg - :target: https://github.com/googleapis/google-cloud-python/blob/master/README.rst#general-availability + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#general-availability .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-memcache.svg :target: https://pypi.org/project/google-cloud-memcache/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-memcache.svg @@ -80,4 +80,4 @@ Next Steps APIs that we cover. .. _Cloud Memorystore for Memcached API Product documentation: https://cloud.google.com/memorystore/docs/memcached/ -.. _repository’s main README: https://github.com/googleapis/google-cloud-python/blob/master/README.rst \ No newline at end of file +.. 
_repository’s main README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst \ No newline at end of file From fb72cd188c819c416f868e224cfc12db9fe5a1c4 Mon Sep 17 00:00:00 2001 From: Jeffrey Rennie Date: Tue, 21 Sep 2021 12:38:32 -0700 Subject: [PATCH 053/159] chore: relocate owl bot post processor (#103) chore: relocate owl bot post processor --- .github/.OwlBot.lock.yaml | 4 ++-- .github/.OwlBot.yaml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index c07f148..2567653 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: - image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:0ffe3bdd6c7159692df5f7744da74e5ef19966288a6bf76023e8e04e0c424d7d + image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest + digest: sha256:87eee22d276554e4e52863ec9b1cb6a7245815dfae20439712bf644348215a5a diff --git a/.github/.OwlBot.yaml b/.github/.OwlBot.yaml index 1f364a6..c431d29 100644 --- a/.github/.OwlBot.yaml +++ b/.github/.OwlBot.yaml @@ -13,7 +13,7 @@ # limitations under the License. docker: - image: gcr.io/repo-automation-bots/owlbot-python:latest + image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest deep-remove-regex: - /owl-bot-staging From c56fbee0ffedac37a80bca5ca3028c53753ada5a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 24 Sep 2021 15:10:12 +0000 Subject: [PATCH 054/159] chore: use gapic-generator-python 0.51.2 (#104) - [ ] Regenerate this pull request now. 
fix: add 'dict' annotation type to 'request' Committer: @busunkim96 PiperOrigin-RevId: 398509016 Source-Link: https://github.com/googleapis/googleapis/commit/b224dfa52642a733ea64849d4e06d15c274bc08f Source-Link: https://github.com/googleapis/googleapis-gen/commit/63a1db7a38d74b9639592f521ed1daaf7299ad9a Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNjNhMWRiN2EzOGQ3NGI5NjM5NTkyZjUyMWVkMWRhYWY3Mjk5YWQ5YSJ9 --- .../services/cloud_memcache/client.py | 30 ++++++++-------- .../cloud_memcache/transports/base.py | 2 +- .../cloud_memcache/transports/grpc.py | 6 ++-- .../cloud_memcache/transports/grpc_asyncio.py | 6 ++-- .../services/cloud_memcache/client.py | 34 +++++++++---------- .../cloud_memcache/transports/base.py | 2 +- .../cloud_memcache/transports/grpc.py | 6 ++-- .../cloud_memcache/transports/grpc_asyncio.py | 6 ++-- scripts/fixup_memcache_v1_keywords.py | 16 ++++----- scripts/fixup_memcache_v1beta2_keywords.py | 18 +++++----- 10 files changed, 63 insertions(+), 63 deletions(-) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py index ed1cf94..6ef32d3 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -17,7 +17,7 @@ from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore @@ -374,7 +374,7 @@ def __init__( def list_instances( self, - request: cloud_memcache.ListInstancesRequest = None, + request: Union[cloud_memcache.ListInstancesRequest, dict] = None, *, parent: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -384,7 +384,7 @@ def list_instances( r"""Lists Instances in a given location. 
Args: - request (google.cloud.memcache_v1.types.ListInstancesRequest): + request (Union[google.cloud.memcache_v1.types.ListInstancesRequest, dict]): The request object. Request for [ListInstances][google.cloud.memcache.v1.CloudMemcache.ListInstances]. parent (str): @@ -456,7 +456,7 @@ def list_instances( def get_instance( self, - request: cloud_memcache.GetInstanceRequest = None, + request: Union[cloud_memcache.GetInstanceRequest, dict] = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -466,7 +466,7 @@ def get_instance( r"""Gets details of a single Instance. Args: - request (google.cloud.memcache_v1.types.GetInstanceRequest): + request (Union[google.cloud.memcache_v1.types.GetInstanceRequest, dict]): The request object. Request for [GetInstance][google.cloud.memcache.v1.CloudMemcache.GetInstance]. name (str): @@ -527,7 +527,7 @@ def get_instance( def create_instance( self, - request: cloud_memcache.CreateInstanceRequest = None, + request: Union[cloud_memcache.CreateInstanceRequest, dict] = None, *, parent: str = None, instance: cloud_memcache.Instance = None, @@ -539,7 +539,7 @@ def create_instance( r"""Creates a new Instance in a given location. Args: - request (google.cloud.memcache_v1.types.CreateInstanceRequest): + request (Union[google.cloud.memcache_v1.types.CreateInstanceRequest, dict]): The request object. Request for [CreateInstance][google.cloud.memcache.v1.CloudMemcache.CreateInstance]. parent (str): @@ -638,7 +638,7 @@ def create_instance( def update_instance( self, - request: cloud_memcache.UpdateInstanceRequest = None, + request: Union[cloud_memcache.UpdateInstanceRequest, dict] = None, *, instance: cloud_memcache.Instance = None, update_mask: field_mask_pb2.FieldMask = None, @@ -650,7 +650,7 @@ def update_instance( location. Args: - request (google.cloud.memcache_v1.types.UpdateInstanceRequest): + request (Union[google.cloud.memcache_v1.types.UpdateInstanceRequest, dict]): The request object. 
Request for [UpdateInstance][google.cloud.memcache.v1.CloudMemcache.UpdateInstance]. instance (google.cloud.memcache_v1.types.Instance): @@ -733,7 +733,7 @@ def update_instance( def update_parameters( self, - request: cloud_memcache.UpdateParametersRequest = None, + request: Union[cloud_memcache.UpdateParametersRequest, dict] = None, *, name: str = None, update_mask: field_mask_pb2.FieldMask = None, @@ -748,7 +748,7 @@ def update_parameters( apply the parameters to nodes of the Memcached Instance. Args: - request (google.cloud.memcache_v1.types.UpdateParametersRequest): + request (Union[google.cloud.memcache_v1.types.UpdateParametersRequest, dict]): The request object. Request for [UpdateParameters][google.cloud.memcache.v1.CloudMemcache.UpdateParameters]. name (str): @@ -836,7 +836,7 @@ def update_parameters( def delete_instance( self, - request: cloud_memcache.DeleteInstanceRequest = None, + request: Union[cloud_memcache.DeleteInstanceRequest, dict] = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -846,7 +846,7 @@ def delete_instance( r"""Deletes a single Instance. Args: - request (google.cloud.memcache_v1.types.DeleteInstanceRequest): + request (Union[google.cloud.memcache_v1.types.DeleteInstanceRequest, dict]): The request object. Request for [DeleteInstance][google.cloud.memcache.v1.CloudMemcache.DeleteInstance]. name (str): @@ -930,7 +930,7 @@ def delete_instance( def apply_parameters( self, - request: cloud_memcache.ApplyParametersRequest = None, + request: Union[cloud_memcache.ApplyParametersRequest, dict] = None, *, name: str = None, node_ids: Sequence[str] = None, @@ -944,7 +944,7 @@ def apply_parameters( parameters for the Memcached Instance. Args: - request (google.cloud.memcache_v1.types.ApplyParametersRequest): + request (Union[google.cloud.memcache_v1.types.ApplyParametersRequest, dict]): The request object. Request for [ApplyParameters][google.cloud.memcache.v1.CloudMemcache.ApplyParameters]. 
name (str): diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py index 041aa53..e858b87 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py @@ -117,7 +117,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py index c4c4827..e6d8a72 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py @@ -100,16 +100,16 @@ def __init__( api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. 
It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py index 8e1ad32..f8ce3db 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py @@ -147,16 +147,16 @@ def __init__( api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index 08b759f..fa4815e 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -17,7 +17,7 @@ from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore @@ -374,7 +374,7 @@ def __init__( def list_instances( self, - request: cloud_memcache.ListInstancesRequest = None, + request: Union[cloud_memcache.ListInstancesRequest, dict] = None, *, parent: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -384,7 +384,7 @@ def list_instances( r"""Lists Instances in a given location. Args: - request (google.cloud.memcache_v1beta2.types.ListInstancesRequest): + request (Union[google.cloud.memcache_v1beta2.types.ListInstancesRequest, dict]): The request object. Request for [ListInstances][google.cloud.memcache.v1beta2.CloudMemcache.ListInstances]. parent (str): @@ -456,7 +456,7 @@ def list_instances( def get_instance( self, - request: cloud_memcache.GetInstanceRequest = None, + request: Union[cloud_memcache.GetInstanceRequest, dict] = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -466,7 +466,7 @@ def get_instance( r"""Gets details of a single Instance. Args: - request (google.cloud.memcache_v1beta2.types.GetInstanceRequest): + request (Union[google.cloud.memcache_v1beta2.types.GetInstanceRequest, dict]): The request object. Request for [GetInstance][google.cloud.memcache.v1beta2.CloudMemcache.GetInstance]. 
name (str): @@ -527,7 +527,7 @@ def get_instance( def create_instance( self, - request: cloud_memcache.CreateInstanceRequest = None, + request: Union[cloud_memcache.CreateInstanceRequest, dict] = None, *, parent: str = None, instance_id: str = None, @@ -539,7 +539,7 @@ def create_instance( r"""Creates a new Instance in a given location. Args: - request (google.cloud.memcache_v1beta2.types.CreateInstanceRequest): + request (Union[google.cloud.memcache_v1beta2.types.CreateInstanceRequest, dict]): The request object. Request for [CreateInstance][google.cloud.memcache.v1beta2.CloudMemcache.CreateInstance]. parent (str): @@ -639,7 +639,7 @@ def create_instance( def update_instance( self, - request: cloud_memcache.UpdateInstanceRequest = None, + request: Union[cloud_memcache.UpdateInstanceRequest, dict] = None, *, update_mask: field_mask_pb2.FieldMask = None, resource: cloud_memcache.Instance = None, @@ -651,7 +651,7 @@ def update_instance( location. Args: - request (google.cloud.memcache_v1beta2.types.UpdateInstanceRequest): + request (Union[google.cloud.memcache_v1beta2.types.UpdateInstanceRequest, dict]): The request object. Request for [UpdateInstance][google.cloud.memcache.v1beta2.CloudMemcache.UpdateInstance]. update_mask (google.protobuf.field_mask_pb2.FieldMask): @@ -735,7 +735,7 @@ def update_instance( def update_parameters( self, - request: cloud_memcache.UpdateParametersRequest = None, + request: Union[cloud_memcache.UpdateParametersRequest, dict] = None, *, name: str = None, update_mask: field_mask_pb2.FieldMask = None, @@ -750,7 +750,7 @@ def update_parameters( of the Memcached instance. Args: - request (google.cloud.memcache_v1beta2.types.UpdateParametersRequest): + request (Union[google.cloud.memcache_v1beta2.types.UpdateParametersRequest, dict]): The request object. Request for [UpdateParameters][google.cloud.memcache.v1beta2.CloudMemcache.UpdateParameters]. 
name (str): @@ -839,7 +839,7 @@ def update_parameters( def delete_instance( self, - request: cloud_memcache.DeleteInstanceRequest = None, + request: Union[cloud_memcache.DeleteInstanceRequest, dict] = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -849,7 +849,7 @@ def delete_instance( r"""Deletes a single Instance. Args: - request (google.cloud.memcache_v1beta2.types.DeleteInstanceRequest): + request (Union[google.cloud.memcache_v1beta2.types.DeleteInstanceRequest, dict]): The request object. Request for [DeleteInstance][google.cloud.memcache.v1beta2.CloudMemcache.DeleteInstance]. name (str): @@ -933,7 +933,7 @@ def delete_instance( def apply_parameters( self, - request: cloud_memcache.ApplyParametersRequest = None, + request: Union[cloud_memcache.ApplyParametersRequest, dict] = None, *, name: str = None, node_ids: Sequence[str] = None, @@ -947,7 +947,7 @@ def apply_parameters( Memcached Instance. Args: - request (google.cloud.memcache_v1beta2.types.ApplyParametersRequest): + request (Union[google.cloud.memcache_v1beta2.types.ApplyParametersRequest, dict]): The request object. Request for [ApplyParameters][google.cloud.memcache.v1beta2.CloudMemcache.ApplyParameters]. name (str): @@ -1040,7 +1040,7 @@ def apply_parameters( def apply_software_update( self, - request: cloud_memcache.ApplySoftwareUpdateRequest = None, + request: Union[cloud_memcache.ApplySoftwareUpdateRequest, dict] = None, *, instance: str = None, node_ids: Sequence[str] = None, @@ -1053,7 +1053,7 @@ def apply_software_update( Instance. Args: - request (google.cloud.memcache_v1beta2.types.ApplySoftwareUpdateRequest): + request (Union[google.cloud.memcache_v1beta2.types.ApplySoftwareUpdateRequest, dict]): The request object. Request for [ApplySoftwareUpdate][google.cloud.memcache.v1beta2.CloudMemcache.ApplySoftwareUpdate]. 
instance (str): diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py index ad750d6..a403524 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py @@ -117,7 +117,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py index b92b70e..6984117 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py @@ -100,16 +100,16 @@ def __init__( api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. 
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py index 379650b..13b49de 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py @@ -147,16 +147,16 @@ def __init__( api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. 
quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/scripts/fixup_memcache_v1_keywords.py b/scripts/fixup_memcache_v1_keywords.py index 774b03f..4acdee8 100644 --- a/scripts/fixup_memcache_v1_keywords.py +++ b/scripts/fixup_memcache_v1_keywords.py @@ -39,13 +39,13 @@ def partition( class memcacheCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'apply_parameters': ('name', 'node_ids', 'apply_all', ), - 'create_instance': ('parent', 'instance_id', 'instance', ), - 'delete_instance': ('name', ), - 'get_instance': ('name', ), - 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'update_instance': ('update_mask', 'instance', ), - 'update_parameters': ('name', 'update_mask', 'parameters', ), + 'apply_parameters': ('name', 'node_ids', 'apply_all', ), + 'create_instance': ('parent', 'instance_id', 'instance', ), + 'delete_instance': ('name', ), + 'get_instance': ('name', ), + 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'update_instance': ('update_mask', 'instance', ), + 'update_parameters': ('name', 'update_mask', 'parameters', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -64,7 +64,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: return updated kwargs, ctrl_kwargs = partition( - lambda a: not a.keyword.value in self.CTRL_PARAMS, + lambda a: a.keyword.value not in self.CTRL_PARAMS, kwargs ) diff --git a/scripts/fixup_memcache_v1beta2_keywords.py b/scripts/fixup_memcache_v1beta2_keywords.py index afe0f42..59d2e11 100644 --- a/scripts/fixup_memcache_v1beta2_keywords.py +++ b/scripts/fixup_memcache_v1beta2_keywords.py @@ -39,14 +39,14 @@ def partition( class memcacheCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = 
{ - 'apply_parameters': ('name', 'node_ids', 'apply_all', ), - 'apply_software_update': ('instance', 'node_ids', 'apply_all', ), - 'create_instance': ('parent', 'instance_id', 'resource', ), - 'delete_instance': ('name', ), - 'get_instance': ('name', ), - 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'update_instance': ('update_mask', 'resource', ), - 'update_parameters': ('name', 'update_mask', 'parameters', ), + 'apply_parameters': ('name', 'node_ids', 'apply_all', ), + 'apply_software_update': ('instance', 'node_ids', 'apply_all', ), + 'create_instance': ('parent', 'instance_id', 'resource', ), + 'delete_instance': ('name', ), + 'get_instance': ('name', ), + 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'update_instance': ('update_mask', 'resource', ), + 'update_parameters': ('name', 'update_mask', 'parameters', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -65,7 +65,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: return updated kwargs, ctrl_kwargs = partition( - lambda a: not a.keyword.value in self.CTRL_PARAMS, + lambda a: a.keyword.value not in self.CTRL_PARAMS, kwargs ) From ddb75f5bea82fa46dd3c692c682bb0591c6a5a30 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 27 Sep 2021 15:50:16 +0000 Subject: [PATCH 055/159] chore: release 1.1.2 (#105) :robot: I have created a release \*beep\* \*boop\* --- ### [1.1.2](https://www.github.com/googleapis/python-memcache/compare/v1.1.1...v1.1.2) (2021-09-24) ### Bug Fixes * add 'dict' annotation type to 'request' ([c56fbee](https://www.github.com/googleapis/python-memcache/commit/c56fbee0ffedac37a80bca5ca3028c53753ada5a)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- CHANGELOG.md | 7 +++++++ setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6f15b70..7615bdd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [1.1.2](https://www.github.com/googleapis/python-memcache/compare/v1.1.1...v1.1.2) (2021-09-24) + + +### Bug Fixes + +* add 'dict' annotation type to 'request' ([c56fbee](https://www.github.com/googleapis/python-memcache/commit/c56fbee0ffedac37a80bca5ca3028c53753ada5a)) + ### [1.1.1](https://www.github.com/googleapis/python-memcache/compare/v1.1.0...v1.1.1) (2021-07-26) diff --git a/setup.py b/setup.py index c6b3c90..edd2511 100644 --- a/setup.py +++ b/setup.py @@ -19,7 +19,7 @@ import os import setuptools # type: ignore -version = "1.1.1" +version = "1.1.2" package_root = os.path.abspath(os.path.dirname(__file__)) From 3680bac8c702cc0313b06dbec3c0c6512ac4a58a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 30 Sep 2021 09:40:26 +0000 Subject: [PATCH 056/159] chore: use gapic-generator-python 0.52.0 (#106) - [ ] Regenerate this pull request now. 
fix: improper types in pagers generation PiperOrigin-RevId: 399773015 Source-Link: https://github.com/googleapis/googleapis/commit/410c184536a22fadaf00aec3cab04102e34d2322 Source-Link: https://github.com/googleapis/googleapis-gen/commit/290e883545e3ac9ff2bd00cd0dacb28f1b8ca945 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMjkwZTg4MzU0NWUzYWM5ZmYyYmQwMGNkMGRhY2IyOGYxYjhjYTk0NSJ9 --- .../memcache_v1/services/cloud_memcache/pagers.py | 12 ++++++------ .../services/cloud_memcache/pagers.py | 12 ++++++------ 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/pagers.py b/google/cloud/memcache_v1/services/cloud_memcache/pagers.py index 7723778..68e9242 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/pagers.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.memcache_v1.types import cloud_memcache @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[cloud_memcache.ListInstancesResponse]: + def pages(self) -> Iterator[cloud_memcache.ListInstancesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[cloud_memcache.Instance]: + def __iter__(self) -> Iterator[cloud_memcache.Instance]: for page in self.pages: yield from page.instances @@ -136,14 +136,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[cloud_memcache.ListInstancesResponse]: + async def pages(self) -> AsyncIterator[cloud_memcache.ListInstancesResponse]: yield self._response while 
self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[cloud_memcache.Instance]: + def __aiter__(self) -> AsyncIterator[cloud_memcache.Instance]: async def async_generator(): async for page in self.pages: for response in page.instances: diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py index 381459d..617b0a7 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.memcache_v1beta2.types import cloud_memcache @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[cloud_memcache.ListInstancesResponse]: + def pages(self) -> Iterator[cloud_memcache.ListInstancesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[cloud_memcache.Instance]: + def __iter__(self) -> Iterator[cloud_memcache.Instance]: for page in self.pages: yield from page.resources @@ -136,14 +136,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[cloud_memcache.ListInstancesResponse]: + async def pages(self) -> AsyncIterator[cloud_memcache.ListInstancesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, 
metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[cloud_memcache.Instance]: + def __aiter__(self) -> AsyncIterator[cloud_memcache.Instance]: async def async_generator(): async for page in self.pages: for response in page.resources: From acf5a6697b9f5ab0316249f4a23958e39fc017e4 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 30 Sep 2021 15:28:28 +0000 Subject: [PATCH 057/159] chore: release 1.1.3 (#107) :robot: I have created a release \*beep\* \*boop\* --- ### [1.1.3](https://www.github.com/googleapis/python-memcache/compare/v1.1.2...v1.1.3) (2021-09-30) ### Bug Fixes * improper types in pagers generation ([3680bac](https://www.github.com/googleapis/python-memcache/commit/3680bac8c702cc0313b06dbec3c0c6512ac4a58a)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- CHANGELOG.md | 7 +++++++ setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7615bdd..abfc885 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [1.1.3](https://www.github.com/googleapis/python-memcache/compare/v1.1.2...v1.1.3) (2021-09-30) + + +### Bug Fixes + +* improper types in pagers generation ([3680bac](https://www.github.com/googleapis/python-memcache/commit/3680bac8c702cc0313b06dbec3c0c6512ac4a58a)) + ### [1.1.2](https://www.github.com/googleapis/python-memcache/compare/v1.1.1...v1.1.2) (2021-09-24) diff --git a/setup.py b/setup.py index edd2511..4e3fc3d 100644 --- a/setup.py +++ b/setup.py @@ -19,7 +19,7 @@ import os import setuptools # type: ignore -version = "1.1.2" +version = "1.1.3" package_root = os.path.abspath(os.path.dirname(__file__)) From 8b8a955b82af71ee92b77076d0ab1f3ecbda6fc9 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 5 Oct 2021 11:17:39 -0400 Subject: [PATCH 058/159] chore: add default_version and codeowner_team to .repo-metadata.json (#109) * chore: add default_version and codeowner_team to .repo-metadata.json * update default_version --- .repo-metadata.json | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.repo-metadata.json b/.repo-metadata.json index 718be57..6fced9a 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -9,5 +9,7 @@ "library_type": "GAPIC_AUTO", "repo": "googleapis/python-memcache", "distribution_name": "google-cloud-memcache", - "api_id": "memcache.googleapis.com" - } \ No newline at end of file + "api_id": "memcache.googleapis.com", + "default_version": "v1", + "codeowner_team": "" +} From 6613ca681313d2b1ce2643b109c773db0e906617 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 5 Oct 2021 11:26:01 -0600 Subject: [PATCH 059/159] build: use trampoline_v2 for python samples and allow custom dockerfile (#110) 
Source-Link: https://github.com/googleapis/synthtool/commit/a7ed11ec0863c422ba2e73aafa75eab22c32b33d Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:6e7328583be8edd3ba8f35311c76a1ecbc823010279ccb6ab46b7a76e25eafcc Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 2 +- .kokoro/samples/lint/common.cfg | 2 +- .kokoro/samples/python3.6/common.cfg | 2 +- .kokoro/samples/python3.6/periodic.cfg | 2 +- .kokoro/samples/python3.7/common.cfg | 2 +- .kokoro/samples/python3.7/periodic.cfg | 2 +- .kokoro/samples/python3.8/common.cfg | 2 +- .kokoro/samples/python3.8/periodic.cfg | 2 +- .kokoro/samples/python3.9/common.cfg | 2 +- .kokoro/samples/python3.9/periodic.cfg | 2 +- .kokoro/test-samples-against-head.sh | 2 -- .kokoro/test-samples.sh | 2 -- .trampolinerc | 17 ++++++++++++++--- 13 files changed, 24 insertions(+), 17 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 2567653..ee94722 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:87eee22d276554e4e52863ec9b1cb6a7245815dfae20439712bf644348215a5a + digest: sha256:6e7328583be8edd3ba8f35311c76a1ecbc823010279ccb6ab46b7a76e25eafcc diff --git a/.kokoro/samples/lint/common.cfg b/.kokoro/samples/lint/common.cfg index 31f0ad7..2672aa9 100644 --- a/.kokoro/samples/lint/common.cfg +++ b/.kokoro/samples/lint/common.cfg @@ -31,4 +31,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-memcache/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-memcache/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg index bfd6190..8210984 100644 --- a/.kokoro/samples/python3.6/common.cfg +++ b/.kokoro/samples/python3.6/common.cfg @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-memcache/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-memcache/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.6/periodic.cfg b/.kokoro/samples/python3.6/periodic.cfg index 50fec96..71cd1e5 100644 --- a/.kokoro/samples/python3.6/periodic.cfg +++ b/.kokoro/samples/python3.6/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg index a7be3aa..f13d51d 100644 --- a/.kokoro/samples/python3.7/common.cfg +++ b/.kokoro/samples/python3.7/common.cfg @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-memcache/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-memcache/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.7/periodic.cfg b/.kokoro/samples/python3.7/periodic.cfg index 50fec96..71cd1e5 100644 --- a/.kokoro/samples/python3.7/periodic.cfg +++ b/.kokoro/samples/python3.7/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg index ec8abd3..a89f1b2 100644 --- a/.kokoro/samples/python3.8/common.cfg +++ b/.kokoro/samples/python3.8/common.cfg @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-memcache/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-memcache/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.8/periodic.cfg b/.kokoro/samples/python3.8/periodic.cfg index 50fec96..71cd1e5 100644 --- a/.kokoro/samples/python3.8/periodic.cfg +++ b/.kokoro/samples/python3.8/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/.kokoro/samples/python3.9/common.cfg b/.kokoro/samples/python3.9/common.cfg index 7d86218..11f9533 100644 --- a/.kokoro/samples/python3.9/common.cfg +++ b/.kokoro/samples/python3.9/common.cfg @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-memcache/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-memcache/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.9/periodic.cfg b/.kokoro/samples/python3.9/periodic.cfg index 50fec96..71cd1e5 100644 --- a/.kokoro/samples/python3.9/periodic.cfg +++ b/.kokoro/samples/python3.9/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh index 113d9c2..ba3a707 100755 --- a/.kokoro/test-samples-against-head.sh +++ b/.kokoro/test-samples-against-head.sh @@ -23,6 +23,4 @@ set -eo pipefail # Enables `**` to include files nested inside sub-folders shopt -s globstar -cd github/python-memcache - exec .kokoro/test-samples-impl.sh diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh index e1c8a45..11c042d 100755 --- a/.kokoro/test-samples.sh +++ b/.kokoro/test-samples.sh @@ -24,8 +24,6 @@ set -eo pipefail # Enables `**` to include files nested inside sub-folders shopt -s globstar -cd github/python-memcache - # Run periodic samples tests at latest release if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then # preserving the test runner implementation. diff --git a/.trampolinerc b/.trampolinerc index 383b6ec..0eee72a 100644 --- a/.trampolinerc +++ b/.trampolinerc @@ -16,15 +16,26 @@ # Add required env vars here. required_envvars+=( - "STAGING_BUCKET" - "V2_STAGING_BUCKET" ) # Add env vars which are passed down into the container here. pass_down_envvars+=( + "NOX_SESSION" + ############### + # Docs builds + ############### "STAGING_BUCKET" "V2_STAGING_BUCKET" - "NOX_SESSION" + ################## + # Samples builds + ################## + "INSTALL_LIBRARY_FROM_SOURCE" + "RUN_TESTS_SESSION" + "BUILD_SPECIFIC_GCLOUD_PROJECT" + # Target directories. + "RUN_TESTS_DIRS" + # The nox session to run. 
+ "RUN_TESTS_SESSION" ) # Prevent unintentional override on the default image. From a385b993b2473a01256042cc2c560f872c6b8c13 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 7 Oct 2021 00:34:28 +0000 Subject: [PATCH 060/159] feat: add context manager support in client (#111) - [ ] Regenerate this pull request now. chore: fix docstring for first attribute of protos committer: @busunkim96 PiperOrigin-RevId: 401271153 Source-Link: https://github.com/googleapis/googleapis/commit/787f8c9a731f44e74a90b9847d48659ca9462d10 Source-Link: https://github.com/googleapis/googleapis-gen/commit/81decffe9fc72396a8153e756d1d67a6eecfd620 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiODFkZWNmZmU5ZmM3MjM5NmE4MTUzZTc1NmQxZDY3YTZlZWNmZDYyMCJ9 --- .../services/cloud_memcache/async_client.py | 6 +++ .../services/cloud_memcache/client.py | 18 +++++-- .../cloud_memcache/transports/base.py | 9 ++++ .../cloud_memcache/transports/grpc.py | 3 ++ .../cloud_memcache/transports/grpc_asyncio.py | 3 ++ .../cloud/memcache_v1/types/cloud_memcache.py | 6 +++ .../services/cloud_memcache/async_client.py | 6 +++ .../services/cloud_memcache/client.py | 18 +++++-- .../cloud_memcache/transports/base.py | 9 ++++ .../cloud_memcache/transports/grpc.py | 3 ++ .../cloud_memcache/transports/grpc_asyncio.py | 3 ++ .../memcache_v1beta2/types/cloud_memcache.py | 8 ++- .../gapic/memcache_v1/test_cloud_memcache.py | 50 +++++++++++++++++++ .../memcache_v1beta2/test_cloud_memcache.py | 50 +++++++++++++++++++ 14 files changed, 183 insertions(+), 9 deletions(-) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py index 738a2db..86d638a 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py @@ -849,6 +849,12 @@ async def apply_parameters( # Done; return the response. 
return response + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py index 6ef32d3..f1864ad 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -366,10 +366,7 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, - always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), + always_use_jwt_access=True, ) def list_instances( @@ -1036,6 +1033,19 @@ def apply_parameters( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py index e858b87..f5e651e 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py @@ -177,6 +177,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def operations_client(self) -> operations_v1.OperationsClient: """Return the client designed to process long-running operations.""" diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py index e6d8a72..7da9600 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py @@ -450,5 +450,8 @@ def apply_parameters( ) return self._stubs["apply_parameters"] + def close(self): + self.grpc_channel.close() + __all__ = ("CloudMemcacheGrpcTransport",) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py index f8ce3db..be0a27c 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py @@ -468,5 +468,8 @@ def apply_parameters( ) return self._stubs["apply_parameters"] + def close(self): + return self.grpc_channel.close() + __all__ = ("CloudMemcacheGrpcAsyncIOTransport",) diff --git a/google/cloud/memcache_v1/types/cloud_memcache.py b/google/cloud/memcache_v1/types/cloud_memcache.py index 2a6777e..ad22da1 100644 --- a/google/cloud/memcache_v1/types/cloud_memcache.py +++ b/google/cloud/memcache_v1/types/cloud_memcache.py @@ -46,6 +46,7 @@ class MemcacheVersion(proto.Enum): class Instance(proto.Message): r""" + Attributes: name (str): Required. Unique name of the resource in this scope @@ -129,6 +130,7 @@ class State(proto.Enum): class NodeConfig(proto.Message): r"""Configuration for a Memcached Node. + Attributes: cpu_count (int): Required. Number of cpus per Memcached node. @@ -142,6 +144,7 @@ class NodeConfig(proto.Message): class Node(proto.Message): r""" + Attributes: node_id (str): Output only. 
Identifier of the Memcached @@ -182,6 +185,7 @@ class State(proto.Enum): class InstanceMessage(proto.Message): r""" + Attributes: code (google.cloud.memcache_v1.types.Instance.InstanceMessage.Code): A code that correspond to one type of user- @@ -417,6 +421,7 @@ class UpdateParametersRequest(proto.Message): class MemcacheParameters(proto.Message): r""" + Attributes: id (str): Output only. The unique ID associated with @@ -436,6 +441,7 @@ class MemcacheParameters(proto.Message): class OperationMetadata(proto.Message): r"""Represents the metadata of a long-running operation. + Attributes: create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Time when the operation was diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py index 4417473..edb5240 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py @@ -960,6 +960,12 @@ async def apply_software_update( # Done; return the response. 
return response + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index fa4815e..7b10707 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -366,10 +366,7 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, - always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), + always_use_jwt_access=True, ) def list_instances( @@ -1147,6 +1144,19 @@ def apply_software_update( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py index a403524..ade194d 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py @@ -182,6 +182,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. 
warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def operations_client(self) -> operations_v1.OperationsClient: """Return the client designed to process long-running operations.""" diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py index 6984117..53e729d 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py @@ -479,5 +479,8 @@ def apply_software_update( ) return self._stubs["apply_software_update"] + def close(self): + self.grpc_channel.close() + __all__ = ("CloudMemcacheGrpcTransport",) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py index 13b49de..46bec80 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py @@ -497,5 +497,8 @@ def apply_software_update( ) return self._stubs["apply_software_update"] + def close(self): + return self.grpc_channel.close() + __all__ = ("CloudMemcacheGrpcAsyncIOTransport",) diff --git a/google/cloud/memcache_v1beta2/types/cloud_memcache.py b/google/cloud/memcache_v1beta2/types/cloud_memcache.py index 6fdd3b8..0ae9e80 100644 --- a/google/cloud/memcache_v1beta2/types/cloud_memcache.py +++ b/google/cloud/memcache_v1beta2/types/cloud_memcache.py @@ -49,6 +49,7 @@ class MemcacheVersion(proto.Enum): class Instance(proto.Message): r"""A Memorystore for Memcached instance + Attributes: name (str): Required. 
Unique name of the resource in this scope @@ -138,6 +139,7 @@ class State(proto.Enum): class NodeConfig(proto.Message): r"""Configuration for a Memcached Node. + Attributes: cpu_count (int): Required. Number of cpus per Memcached node. @@ -151,6 +153,7 @@ class NodeConfig(proto.Message): class Node(proto.Message): r""" + Attributes: node_id (str): Output only. Identifier of the Memcached @@ -195,6 +198,7 @@ class State(proto.Enum): class InstanceMessage(proto.Message): r""" + Attributes: code (google.cloud.memcache_v1beta2.types.Instance.InstanceMessage.Code): A code that correspond to one type of user- @@ -475,6 +479,7 @@ class MemcacheParameters(proto.Message): class OperationMetadata(proto.Message): r"""Represents the metadata of a long-running operation. + Attributes: create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Time when the operation was @@ -529,7 +534,8 @@ class LocationMetadata(proto.Message): class ZoneMetadata(proto.Message): - r""" """ + r""" + """ __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py index 849c4d8..0c0d3ed 100644 --- a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -32,6 +32,7 @@ from google.api_core import grpc_helpers_async from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.memcache_v1.services.cloud_memcache import CloudMemcacheAsyncClient @@ -2274,6 +2275,9 @@ def test_cloud_memcache_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + # Additionally, the LRO client (a property) should # also raise 
NotImplementedError with pytest.raises(NotImplementedError): @@ -2785,3 +2789,49 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "grpc", + ] + for transport in transports: + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index 767f2d1..392d652 100644 --- a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -32,6 +32,7 @@ from google.api_core import grpc_helpers_async from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.memcache_v1beta2.services.cloud_memcache import ( @@ -2510,6 +2511,9 @@ def test_cloud_memcache_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + # Additionally, the LRO client (a property) should # also raise NotImplementedError with pytest.raises(NotImplementedError): @@ -3021,3 +3025,49 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + 
type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "grpc", + ] + for transport in transports: + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() From 37bff00ff5de47025e0d96c27f5380cf021ee2b4 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 7 Oct 2021 19:24:31 +0000 Subject: [PATCH 061/159] chore(python): fix formatting issue in noxfile.py.j2 (#113) --- .github/.OwlBot.lock.yaml | 2 +- CONTRIBUTING.rst | 6 ++++-- noxfile.py | 2 +- testing/constraints-3.10.txt | 0 testing/constraints-3.11.txt | 0 5 files changed, 6 insertions(+), 4 deletions(-) create mode 100644 testing/constraints-3.10.txt create mode 100644 testing/constraints-3.11.txt diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index ee94722..76d0baa 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:6e7328583be8edd3ba8f35311c76a1ecbc823010279ccb6ab46b7a76e25eafcc + digest: sha256:4370ced27a324687ede5da07132dcdc5381993502a5e8a3e31e16dc631d026f0 diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 7aeedc1..2718ea6 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.6, 3.7, 3.8 and 3.9 on both UNIX and Windows. + 3.6, 3.7, 3.8, 3.9 and 3.10 on both UNIX and Windows. 
- The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests. - To run a single unit test:: - $ nox -s unit-3.9 -- -k + $ nox -s unit-3.10 -- -k .. note:: @@ -225,11 +225,13 @@ We support: - `Python 3.7`_ - `Python 3.8`_ - `Python 3.9`_ +- `Python 3.10`_ .. _Python 3.6: https://docs.python.org/3.6/ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ .. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ Supported versions can be found in our ``noxfile.py`` `config`_. diff --git a/noxfile.py b/noxfile.py index 935a924..2bb4cf7 100644 --- a/noxfile.py +++ b/noxfile.py @@ -29,7 +29,7 @@ DEFAULT_PYTHON_VERSION = "3.8" SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] -UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() diff --git a/testing/constraints-3.10.txt b/testing/constraints-3.10.txt new file mode 100644 index 0000000..e69de29 diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt new file mode 100644 index 0000000..e69de29 From ec9449acfffcc3e3b02923a9e0cd579f37a602c5 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 8 Oct 2021 18:20:19 +0000 Subject: [PATCH 062/159] chore(python): Add kokoro configs for python 3.10 samples testing (#114) --- .github/.OwlBot.lock.yaml | 2 +- .kokoro/samples/python3.10/common.cfg | 40 ++++++++++++++++++++ .kokoro/samples/python3.10/continuous.cfg | 6 +++ .kokoro/samples/python3.10/periodic-head.cfg | 11 ++++++ .kokoro/samples/python3.10/periodic.cfg | 6 +++ .kokoro/samples/python3.10/presubmit.cfg | 6 +++ 6 files changed, 70 insertions(+), 1 deletion(-) create mode 100644 .kokoro/samples/python3.10/common.cfg create mode 100644 
.kokoro/samples/python3.10/continuous.cfg create mode 100644 .kokoro/samples/python3.10/periodic-head.cfg create mode 100644 .kokoro/samples/python3.10/periodic.cfg create mode 100644 .kokoro/samples/python3.10/presubmit.cfg diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 76d0baa..7d98291 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:4370ced27a324687ede5da07132dcdc5381993502a5e8a3e31e16dc631d026f0 + digest: sha256:58f73ba196b5414782605236dd0712a73541b44ff2ff4d3a36ec41092dd6fa5b diff --git a/.kokoro/samples/python3.10/common.cfg b/.kokoro/samples/python3.10/common.cfg new file mode 100644 index 0000000..e6768d6 --- /dev/null +++ b/.kokoro/samples/python3.10/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.10" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-310" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-memcache/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-memcache/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.10/continuous.cfg b/.kokoro/samples/python3.10/continuous.cfg new file mode 100644 index 0000000..a1c8d97 --- /dev/null +++ b/.kokoro/samples/python3.10/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.10/periodic-head.cfg b/.kokoro/samples/python3.10/periodic-head.cfg new file mode 100644 index 0000000..aa527a5 --- /dev/null +++ b/.kokoro/samples/python3.10/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-memcache/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.10/periodic.cfg b/.kokoro/samples/python3.10/periodic.cfg new file mode 100644 index 0000000..71cd1e5 --- /dev/null +++ b/.kokoro/samples/python3.10/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/.kokoro/samples/python3.10/presubmit.cfg b/.kokoro/samples/python3.10/presubmit.cfg new file mode 100644 index 0000000..a1c8d97 --- /dev/null +++ b/.kokoro/samples/python3.10/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file From 0aac9840cf6d0d4b9bdc7bb2b9bdc804d7649de8 Mon Sep 17 00:00:00 2001 From: Anurag Kumar Date: Tue, 12 Oct 2021 21:11:41 +0530 Subject: [PATCH 063/159] chore: add Python 3.9 and 3.10 to trove classifiers (#115) --- setup.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/setup.py b/setup.py index 4e3fc3d..d92bdfc 100644 --- a/setup.py +++ 
b/setup.py @@ -55,6 +55,8 @@ "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", "Topic :: Internet", "Topic :: Software Development :: Libraries :: Python Modules", ], From 266772902379cd7c3d048fa4474e23b9afbe5131 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 12 Oct 2021 11:21:32 -0600 Subject: [PATCH 064/159] chore: release 1.2.0 (#112) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 7 +++++++ setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index abfc885..f5452d1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.2.0](https://www.github.com/googleapis/python-memcache/compare/v1.1.3...v1.2.0) (2021-10-12) + + +### Features + +* add context manager support in client ([#111](https://www.github.com/googleapis/python-memcache/issues/111)) ([a385b99](https://www.github.com/googleapis/python-memcache/commit/a385b993b2473a01256042cc2c560f872c6b8c13)) + ### [1.1.3](https://www.github.com/googleapis/python-memcache/compare/v1.1.2...v1.1.3) (2021-09-30) diff --git a/setup.py b/setup.py index d92bdfc..3a654fe 100644 --- a/setup.py +++ b/setup.py @@ -19,7 +19,7 @@ import os import setuptools # type: ignore -version = "1.1.3" +version = "1.2.0" package_root = os.path.abspath(os.path.dirname(__file__)) From ca4665a4ebb0721c1063d48ee30c42bed2efba29 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 18 Oct 2021 10:59:49 -0400 Subject: [PATCH 065/159] chore: delete owlbot.py (#117) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: delete owlbot.py * 🩉 Updates from OwlBot See 
https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 2 +- docs/index.rst | 27 ++++++++++------------- owlbot.py | 46 --------------------------------------- 3 files changed, 13 insertions(+), 62 deletions(-) delete mode 100644 owlbot.py diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 7d98291..ba7b2f7 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:58f73ba196b5414782605236dd0712a73541b44ff2ff4d3a36ec41092dd6fa5b + digest: sha256:3728d8fd14daa46a96d04ce61c6451a3ac864dc48fb71eecbb4411f4a95618d4 diff --git a/docs/index.rst b/docs/index.rst index 56540c0..28d8305 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -2,28 +2,25 @@ .. include:: multiprocessing.rst -This package includes clients for multiple versions of the Memcache -API. By default, you will get ``v1``, the latest GA version. +This package includes clients for multiple versions of Cloud Memorystore for Memcached. +By default, you will get version ``memcache_v1``. -memcache_v1 API Reference -------------------------- +API Reference +------------- .. toctree:: :maxdepth: 2 - Client (v1) - Types (v1) - - -memcache_v1beta2 API Reference ------------------------------- + memcache_v1/services + memcache_v1/types +API Reference +------------- .. toctree:: :maxdepth: 2 - Client (v1beta2) - Types (v1beta2) - + memcache_v1beta2/services + memcache_v1beta2/types Changelog --------- @@ -31,6 +28,6 @@ Changelog For a list of all ``google-cloud-memcache`` releases: .. 
toctree:: - :maxdepth: 2 + :maxdepth: 2 - changelog + changelog diff --git a/owlbot.py b/owlbot.py deleted file mode 100644 index e32ab90..0000000 --- a/owlbot.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This script is used to synthesize generated parts of this library.""" -import os - -import synthtool as s -import synthtool.gcp as gcp -from synthtool.languages import python - -common = gcp.CommonTemplates() - -default_version = "v1" - -for library in s.get_staging_dirs(default_version): - excludes = [ - "setup.py", - "docs/index.rst", - "README.rst" - ] - s.move(library, excludes=excludes) - -s.remove_staging_dirs() - -# ---------------------------------------------------------------------------- -# Add templated files -# ---------------------------------------------------------------------------- -templated_files = common.py_library(cov_level=100, microgenerator=True) -python.py_samples(skip_readmes=True) -s.move( - templated_files, excludes=[".coveragerc"] -) # the microgenerator has a good coveragerc file - -s.shell.run(["nox", "-s", "blacken"], hide_output=False) - From d05bbe6b8ad0c80ba61d1d465432f955b0dcdbf4 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 19 Oct 2021 20:53:17 -0400 Subject: [PATCH 066/159] chore: fix formatting in docs/index.rst (#118) Source-Link: 
https://github.com/googleapis/synthtool/commit/949c010e1cd99da4a85b07f06abe19200cc535ed Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:17723e4df0cdd8d4508547ee517df2287af52c9d3f26a91e897bc618f73ec293 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 2 +- docs/index.rst | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index ba7b2f7..1ca6ab4 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:3728d8fd14daa46a96d04ce61c6451a3ac864dc48fb71eecbb4411f4a95618d4 + digest: sha256:17723e4df0cdd8d4508547ee517df2287af52c9d3f26a91e897bc618f73ec293 diff --git a/docs/index.rst b/docs/index.rst index 28d8305..bf4a173 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -22,12 +22,13 @@ API Reference memcache_v1beta2/services memcache_v1beta2/types + Changelog --------- For a list of all ``google-cloud-memcache`` releases: .. 
toctree:: - :maxdepth: 2 + :maxdepth: 2 - changelog + changelog From ef7081424b43aa50630ad73a84a6547fed8c3ee8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 25 Oct 2021 17:58:46 -0400 Subject: [PATCH 067/159] chore(python): push cloud library docs to staging bucket for Cloud RAD (#119) Source-Link: https://github.com/googleapis/synthtool/commit/7fd61f8efae782a7cfcecc599faf52f9737fe584 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:4ee57a76a176ede9087c14330c625a71553cf9c72828b2c0ca12f5338171ba60 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 2 +- .kokoro/docs/common.cfg | 1 + noxfile.py | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 1ca6ab4..108063d 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:17723e4df0cdd8d4508547ee517df2287af52c9d3f26a91e897bc618f73ec293 + digest: sha256:4ee57a76a176ede9087c14330c625a71553cf9c72828b2c0ca12f5338171ba60 diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg index 6197ce8..7cebbb8 100644 --- a/.kokoro/docs/common.cfg +++ b/.kokoro/docs/common.cfg @@ -30,6 +30,7 @@ env_vars: { env_vars: { key: "V2_STAGING_BUCKET" + # Push google cloud library docs to the Cloud RAD bucket `docs-staging-v2` value: "docs-staging-v2" } diff --git a/noxfile.py b/noxfile.py index 2bb4cf7..2a2001c 100644 --- a/noxfile.py +++ b/noxfile.py @@ -101,7 +101,7 @@ def default(session): "py.test", "--quiet", f"--junitxml=unit_{session.python}_sponge_log.xml", - "--cov=google/cloud", + "--cov=google", "--cov=tests/unit", "--cov-append", "--cov-config=.coveragerc", From 5159fe99b200979b54ce76633a7b8cda87931eee Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 1 Nov 2021 11:32:12 
+0000 Subject: [PATCH 068/159] chore: use gapic-generator-python 0.53.4 (#120) - [ ] Regenerate this pull request now. docs: list oneofs in docstring fix(deps): require google-api-core >= 1.28.0 fix(deps): drop packaging dependency committer: busunkim96@ PiperOrigin-RevId: 406468269 Source-Link: https://github.com/googleapis/googleapis/commit/83d81b0c8fc22291a13398d6d77f02dc97a5b6f4 Source-Link: https://github.com/googleapis/googleapis-gen/commit/2ff001fbacb9e77e71d734de5f955c05fdae8526 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMmZmMDAxZmJhY2I5ZTc3ZTcxZDczNGRlNWY5NTVjMDVmZGFlODUyNiJ9 --- .../services/cloud_memcache/async_client.py | 46 +++++----- .../services/cloud_memcache/client.py | 16 ++-- .../cloud_memcache/transports/base.py | 37 +------- .../cloud_memcache/transports/grpc.py | 2 +- .../cloud_memcache/transports/grpc_asyncio.py | 3 +- .../services/cloud_memcache/async_client.py | 52 ++++++----- .../services/cloud_memcache/client.py | 18 ++-- .../cloud_memcache/transports/base.py | 37 +------- .../cloud_memcache/transports/grpc.py | 2 +- .../cloud_memcache/transports/grpc_asyncio.py | 3 +- setup.py | 3 +- testing/constraints-3.6.txt | 5 +- .../gapic/memcache_v1/test_cloud_memcache.py | 91 ++----------------- .../memcache_v1beta2/test_cloud_memcache.py | 91 ++----------------- 14 files changed, 98 insertions(+), 308 deletions(-) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py index 86d638a..599f1fc 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py @@ -19,13 +19,15 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core.client_options import ClientOptions # type: ignore from google.api_core import exceptions as core_exceptions # type: ignore 
from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.memcache_v1.services.cloud_memcache import pagers @@ -187,17 +189,17 @@ def __init__( async def list_instances( self, - request: cloud_memcache.ListInstancesRequest = None, + request: Union[cloud_memcache.ListInstancesRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesAsyncPager: r"""Lists Instances in a given location. Args: - request (:class:`google.cloud.memcache_v1.types.ListInstancesRequest`): + request (Union[google.cloud.memcache_v1.types.ListInstancesRequest, dict]): The request object. Request for [ListInstances][google.cloud.memcache.v1.CloudMemcache.ListInstances]. parent (:class:`str`): @@ -269,17 +271,17 @@ async def list_instances( async def get_instance( self, - request: cloud_memcache.GetInstanceRequest = None, + request: Union[cloud_memcache.GetInstanceRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> cloud_memcache.Instance: r"""Gets details of a single Instance. Args: - request (:class:`google.cloud.memcache_v1.types.GetInstanceRequest`): + request (Union[google.cloud.memcache_v1.types.GetInstanceRequest, dict]): The request object. Request for [GetInstance][google.cloud.memcache.v1.CloudMemcache.GetInstance]. 
name (:class:`str`): @@ -340,19 +342,19 @@ async def get_instance( async def create_instance( self, - request: cloud_memcache.CreateInstanceRequest = None, + request: Union[cloud_memcache.CreateInstanceRequest, dict] = None, *, parent: str = None, instance: cloud_memcache.Instance = None, instance_id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Creates a new Instance in a given location. Args: - request (:class:`google.cloud.memcache_v1.types.CreateInstanceRequest`): + request (Union[google.cloud.memcache_v1.types.CreateInstanceRequest, dict]): The request object. Request for [CreateInstance][google.cloud.memcache.v1.CloudMemcache.CreateInstance]. parent (:class:`str`): @@ -451,11 +453,11 @@ async def create_instance( async def update_instance( self, - request: cloud_memcache.UpdateInstanceRequest = None, + request: Union[cloud_memcache.UpdateInstanceRequest, dict] = None, *, instance: cloud_memcache.Instance = None, update_mask: field_mask_pb2.FieldMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: @@ -463,7 +465,7 @@ async def update_instance( location. Args: - request (:class:`google.cloud.memcache_v1.types.UpdateInstanceRequest`): + request (Union[google.cloud.memcache_v1.types.UpdateInstanceRequest, dict]): The request object. Request for [UpdateInstance][google.cloud.memcache.v1.CloudMemcache.UpdateInstance]. 
instance (:class:`google.cloud.memcache_v1.types.Instance`): @@ -546,12 +548,12 @@ async def update_instance( async def update_parameters( self, - request: cloud_memcache.UpdateParametersRequest = None, + request: Union[cloud_memcache.UpdateParametersRequest, dict] = None, *, name: str = None, update_mask: field_mask_pb2.FieldMask = None, parameters: cloud_memcache.MemcacheParameters = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: @@ -561,7 +563,7 @@ async def update_parameters( apply the parameters to nodes of the Memcached Instance. Args: - request (:class:`google.cloud.memcache_v1.types.UpdateParametersRequest`): + request (Union[google.cloud.memcache_v1.types.UpdateParametersRequest, dict]): The request object. Request for [UpdateParameters][google.cloud.memcache.v1.CloudMemcache.UpdateParameters]. name (:class:`str`): @@ -649,17 +651,17 @@ async def update_parameters( async def delete_instance( self, - request: cloud_memcache.DeleteInstanceRequest = None, + request: Union[cloud_memcache.DeleteInstanceRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Deletes a single Instance. Args: - request (:class:`google.cloud.memcache_v1.types.DeleteInstanceRequest`): + request (Union[google.cloud.memcache_v1.types.DeleteInstanceRequest, dict]): The request object. Request for [DeleteInstance][google.cloud.memcache.v1.CloudMemcache.DeleteInstance]. 
name (:class:`str`): @@ -743,12 +745,12 @@ async def delete_instance( async def apply_parameters( self, - request: cloud_memcache.ApplyParametersRequest = None, + request: Union[cloud_memcache.ApplyParametersRequest, dict] = None, *, name: str = None, node_ids: Sequence[str] = None, apply_all: bool = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: @@ -757,7 +759,7 @@ async def apply_parameters( parameters for the Memcached Instance. Args: - request (:class:`google.cloud.memcache_v1.types.ApplyParametersRequest`): + request (Union[google.cloud.memcache_v1.types.ApplyParametersRequest, dict]): The request object. Request for [ApplyParameters][google.cloud.memcache.v1.CloudMemcache.ApplyParameters]. name (:class:`str`): diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py index f1864ad..7339c03 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -30,6 +30,8 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.memcache_v1.services.cloud_memcache import pagers @@ -374,7 +376,7 @@ def list_instances( request: Union[cloud_memcache.ListInstancesRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesPager: @@ -456,7 +458,7 @@ def get_instance( request: Union[cloud_memcache.GetInstanceRequest, dict] = None, *, name: str = 
None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> cloud_memcache.Instance: @@ -529,7 +531,7 @@ def create_instance( parent: str = None, instance: cloud_memcache.Instance = None, instance_id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: @@ -639,7 +641,7 @@ def update_instance( *, instance: cloud_memcache.Instance = None, update_mask: field_mask_pb2.FieldMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: @@ -735,7 +737,7 @@ def update_parameters( name: str = None, update_mask: field_mask_pb2.FieldMask = None, parameters: cloud_memcache.MemcacheParameters = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: @@ -836,7 +838,7 @@ def delete_instance( request: Union[cloud_memcache.DeleteInstanceRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: @@ -932,7 +934,7 @@ def apply_parameters( name: str = None, node_ids: Sequence[str] = None, apply_all: bool = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py index f5e651e..31b196f 
100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py @@ -15,7 +15,6 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources import google.auth # type: ignore @@ -37,15 +36,6 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class CloudMemcacheTransport(abc.ABC): """Abstract transport class for CloudMemcache.""" @@ -95,7 +85,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -128,29 +118,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. 
- - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -187,7 +154,7 @@ def close(self): raise NotImplementedError() @property - def operations_client(self) -> operations_v1.OperationsClient: + def operations_client(self): """Return the client designed to process long-running operations.""" raise NotImplementedError() diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py index 7da9600..69200f9 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py @@ -130,7 +130,7 @@ def __init__( self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} - self._operations_client = None + self._operations_client: Optional[operations_v1.OperationsClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py index be0a27c..cf8fb59 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py +++ 
b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py @@ -21,7 +21,6 @@ from google.api_core import operations_v1 # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore @@ -177,7 +176,7 @@ def __init__( self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} - self._operations_client = None + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py index edb5240..a1444b4 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py @@ -19,13 +19,15 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core.client_options import ClientOptions # type: ignore from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.memcache_v1beta2.services.cloud_memcache import pagers @@ -187,17 +189,17 @@ def __init__( async def list_instances( self, - request: cloud_memcache.ListInstancesRequest = None, + 
request: Union[cloud_memcache.ListInstancesRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesAsyncPager: r"""Lists Instances in a given location. Args: - request (:class:`google.cloud.memcache_v1beta2.types.ListInstancesRequest`): + request (Union[google.cloud.memcache_v1beta2.types.ListInstancesRequest, dict]): The request object. Request for [ListInstances][google.cloud.memcache.v1beta2.CloudMemcache.ListInstances]. parent (:class:`str`): @@ -269,17 +271,17 @@ async def list_instances( async def get_instance( self, - request: cloud_memcache.GetInstanceRequest = None, + request: Union[cloud_memcache.GetInstanceRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> cloud_memcache.Instance: r"""Gets details of a single Instance. Args: - request (:class:`google.cloud.memcache_v1beta2.types.GetInstanceRequest`): + request (Union[google.cloud.memcache_v1beta2.types.GetInstanceRequest, dict]): The request object. Request for [GetInstance][google.cloud.memcache.v1beta2.CloudMemcache.GetInstance]. name (:class:`str`): @@ -340,19 +342,19 @@ async def get_instance( async def create_instance( self, - request: cloud_memcache.CreateInstanceRequest = None, + request: Union[cloud_memcache.CreateInstanceRequest, dict] = None, *, parent: str = None, instance_id: str = None, resource: cloud_memcache.Instance = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Creates a new Instance in a given location. 
Args: - request (:class:`google.cloud.memcache_v1beta2.types.CreateInstanceRequest`): + request (Union[google.cloud.memcache_v1beta2.types.CreateInstanceRequest, dict]): The request object. Request for [CreateInstance][google.cloud.memcache.v1beta2.CloudMemcache.CreateInstance]. parent (:class:`str`): @@ -452,11 +454,11 @@ async def create_instance( async def update_instance( self, - request: cloud_memcache.UpdateInstanceRequest = None, + request: Union[cloud_memcache.UpdateInstanceRequest, dict] = None, *, update_mask: field_mask_pb2.FieldMask = None, resource: cloud_memcache.Instance = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: @@ -464,7 +466,7 @@ async def update_instance( location. Args: - request (:class:`google.cloud.memcache_v1beta2.types.UpdateInstanceRequest`): + request (Union[google.cloud.memcache_v1beta2.types.UpdateInstanceRequest, dict]): The request object. Request for [UpdateInstance][google.cloud.memcache.v1beta2.CloudMemcache.UpdateInstance]. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): @@ -548,12 +550,12 @@ async def update_instance( async def update_parameters( self, - request: cloud_memcache.UpdateParametersRequest = None, + request: Union[cloud_memcache.UpdateParametersRequest, dict] = None, *, name: str = None, update_mask: field_mask_pb2.FieldMask = None, parameters: cloud_memcache.MemcacheParameters = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: @@ -563,7 +565,7 @@ async def update_parameters( of the Memcached instance. 
Args: - request (:class:`google.cloud.memcache_v1beta2.types.UpdateParametersRequest`): + request (Union[google.cloud.memcache_v1beta2.types.UpdateParametersRequest, dict]): The request object. Request for [UpdateParameters][google.cloud.memcache.v1beta2.CloudMemcache.UpdateParameters]. name (:class:`str`): @@ -652,17 +654,17 @@ async def update_parameters( async def delete_instance( self, - request: cloud_memcache.DeleteInstanceRequest = None, + request: Union[cloud_memcache.DeleteInstanceRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Deletes a single Instance. Args: - request (:class:`google.cloud.memcache_v1beta2.types.DeleteInstanceRequest`): + request (Union[google.cloud.memcache_v1beta2.types.DeleteInstanceRequest, dict]): The request object. Request for [DeleteInstance][google.cloud.memcache.v1beta2.CloudMemcache.DeleteInstance]. name (:class:`str`): @@ -746,12 +748,12 @@ async def delete_instance( async def apply_parameters( self, - request: cloud_memcache.ApplyParametersRequest = None, + request: Union[cloud_memcache.ApplyParametersRequest, dict] = None, *, name: str = None, node_ids: Sequence[str] = None, apply_all: bool = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: @@ -760,7 +762,7 @@ async def apply_parameters( Memcached Instance. Args: - request (:class:`google.cloud.memcache_v1beta2.types.ApplyParametersRequest`): + request (Union[google.cloud.memcache_v1beta2.types.ApplyParametersRequest, dict]): The request object. Request for [ApplyParameters][google.cloud.memcache.v1beta2.CloudMemcache.ApplyParameters]. 
name (:class:`str`): @@ -853,12 +855,12 @@ async def apply_parameters( async def apply_software_update( self, - request: cloud_memcache.ApplySoftwareUpdateRequest = None, + request: Union[cloud_memcache.ApplySoftwareUpdateRequest, dict] = None, *, instance: str = None, node_ids: Sequence[str] = None, apply_all: bool = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: @@ -866,7 +868,7 @@ async def apply_software_update( Instance. Args: - request (:class:`google.cloud.memcache_v1beta2.types.ApplySoftwareUpdateRequest`): + request (Union[google.cloud.memcache_v1beta2.types.ApplySoftwareUpdateRequest, dict]): The request object. Request for [ApplySoftwareUpdate][google.cloud.memcache.v1beta2.CloudMemcache.ApplySoftwareUpdate]. instance (:class:`str`): diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index 7b10707..cb200ff 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -30,6 +30,8 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.memcache_v1beta2.services.cloud_memcache import pagers @@ -374,7 +376,7 @@ def list_instances( request: Union[cloud_memcache.ListInstancesRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesPager: @@ -456,7 +458,7 @@ def get_instance( request: 
Union[cloud_memcache.GetInstanceRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> cloud_memcache.Instance: @@ -529,7 +531,7 @@ def create_instance( parent: str = None, instance_id: str = None, resource: cloud_memcache.Instance = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: @@ -640,7 +642,7 @@ def update_instance( *, update_mask: field_mask_pb2.FieldMask = None, resource: cloud_memcache.Instance = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: @@ -737,7 +739,7 @@ def update_parameters( name: str = None, update_mask: field_mask_pb2.FieldMask = None, parameters: cloud_memcache.MemcacheParameters = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: @@ -839,7 +841,7 @@ def delete_instance( request: Union[cloud_memcache.DeleteInstanceRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: @@ -935,7 +937,7 @@ def apply_parameters( name: str = None, node_ids: Sequence[str] = None, apply_all: bool = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: @@ -1042,7 +1044,7 @@ def apply_software_update( instance: str = None, node_ids: Sequence[str] = None, 
apply_all: bool = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py index ade194d..2fb963f 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py @@ -15,7 +15,6 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources import google.auth # type: ignore @@ -37,15 +36,6 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class CloudMemcacheTransport(abc.ABC): """Abstract transport class for CloudMemcache.""" @@ -95,7 +85,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -128,29 +118,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. 
- - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -192,7 +159,7 @@ def close(self): raise NotImplementedError() @property - def operations_client(self) -> operations_v1.OperationsClient: + def operations_client(self): """Return the client designed to process long-running operations.""" raise NotImplementedError() diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py index 53e729d..ebd9446 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py @@ -130,7 +130,7 @@ def __init__( self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} - self._operations_client = None + self._operations_client: Optional[operations_v1.OperationsClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py index 46bec80..a2c9d82 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py +++ 
b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py @@ -21,7 +21,6 @@ from google.api_core import operations_v1 # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore @@ -177,7 +176,7 @@ def __init__( self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} - self._operations_client = None + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) diff --git a/setup.py b/setup.py index 3a654fe..622d810 100644 --- a/setup.py +++ b/setup.py @@ -43,9 +43,8 @@ # NOTE: Maintainers, please do not require google-api-core>=2.x.x # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 - "google-api-core[grpc] >= 1.26.0, <3.0.0dev", + "google-api-core[grpc] >= 1.28.0, <3.0.0dev", "proto-plus >= 1.4.0", - "packaging >= 14.3", ), python_requires=">=3.6", classifiers=[ diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index 8b9d25b..1e3ec8b 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -2,10 +2,7 @@ # are correct in setup.py # List all library dependencies and extras in this file. # Pin the version to the lower bound. 
- # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.26.0 +google-api-core==1.28.0 proto-plus==1.15.0 -packaging==14.3 -google-auth==1.24.0 # TODO: remove when google-auth>=1.25.0 is required through google-api-core diff --git a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py index 0c0d3ed..a87e13a 100644 --- a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -39,9 +38,6 @@ from google.cloud.memcache_v1.services.cloud_memcache import CloudMemcacheClient from google.cloud.memcache_v1.services.cloud_memcache import pagers from google.cloud.memcache_v1.services.cloud_memcache import transports -from google.cloud.memcache_v1.services.cloud_memcache.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.memcache_v1.types import cloud_memcache from google.longrunning import operations_pb2 from google.oauth2 import service_account @@ -50,20 +46,6 @@ import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -220,7 +202,7 @@ def test_cloud_memcache_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -237,7 +219,7 @@ def test_cloud_memcache_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -254,7 +236,7 @@ def test_cloud_memcache_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -283,7 +265,7 @@ def test_cloud_memcache_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, 
client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -342,7 +324,7 @@ def test_cloud_memcache_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -384,7 +366,7 @@ def test_cloud_memcache_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -406,7 +388,7 @@ def test_cloud_memcache_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -437,7 +419,7 @@ def test_cloud_memcache_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -468,7 +450,7 @@ def test_cloud_memcache_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -2284,7 +2266,6 @@ def test_cloud_memcache_base_transport(): transport.operations_client 
-@requires_google_auth_gte_1_25_0 def test_cloud_memcache_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -2305,26 +2286,6 @@ def test_cloud_memcache_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_cloud_memcache_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.memcache_v1.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudMemcacheTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - def test_cloud_memcache_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -2336,7 +2297,6 @@ def test_cloud_memcache_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_cloud_memcache_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -2349,18 +2309,6 @@ def test_cloud_memcache_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_cloud_memcache_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - CloudMemcacheClient() - adc.assert_called_once_with( - scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) - - @pytest.mark.parametrize( "transport_class", [ @@ -2368,7 +2316,6 @@ def test_cloud_memcache_auth_adc_old_google_auth(): transports.CloudMemcacheGrpcAsyncIOTransport, ], ) -@requires_google_auth_gte_1_25_0 def test_cloud_memcache_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. @@ -2382,26 +2329,6 @@ def test_cloud_memcache_transport_auth_adc(transport_class): ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudMemcacheGrpcTransport, - transports.CloudMemcacheGrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_cloud_memcache_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with( - scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - @pytest.mark.parametrize( "transport_class,grpc_helpers", [ diff --git a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index 392d652..df8400f 100644 --- a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -41,9 +40,6 @@ from google.cloud.memcache_v1beta2.services.cloud_memcache import CloudMemcacheClient from google.cloud.memcache_v1beta2.services.cloud_memcache import pagers from google.cloud.memcache_v1beta2.services.cloud_memcache import transports -from google.cloud.memcache_v1beta2.services.cloud_memcache.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.memcache_v1beta2.types import cloud_memcache from google.longrunning import operations_pb2 from google.oauth2 import service_account @@ -52,20 +48,6 @@ import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -222,7 +204,7 @@ def test_cloud_memcache_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -239,7 +221,7 @@ def test_cloud_memcache_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -256,7 +238,7 @@ def test_cloud_memcache_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -285,7 +267,7 @@ def test_cloud_memcache_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, 
client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -344,7 +326,7 @@ def test_cloud_memcache_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -386,7 +368,7 @@ def test_cloud_memcache_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -408,7 +390,7 @@ def test_cloud_memcache_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -439,7 +421,7 @@ def test_cloud_memcache_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -470,7 +452,7 @@ def test_cloud_memcache_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -2520,7 +2502,6 @@ def test_cloud_memcache_base_transport(): transport.operations_client 
-@requires_google_auth_gte_1_25_0 def test_cloud_memcache_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -2541,26 +2522,6 @@ def test_cloud_memcache_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_cloud_memcache_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudMemcacheTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - def test_cloud_memcache_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -2572,7 +2533,6 @@ def test_cloud_memcache_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_cloud_memcache_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -2585,18 +2545,6 @@ def test_cloud_memcache_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_cloud_memcache_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - CloudMemcacheClient() - adc.assert_called_once_with( - scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) - - @pytest.mark.parametrize( "transport_class", [ @@ -2604,7 +2552,6 @@ def test_cloud_memcache_auth_adc_old_google_auth(): transports.CloudMemcacheGrpcAsyncIOTransport, ], ) -@requires_google_auth_gte_1_25_0 def test_cloud_memcache_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. @@ -2618,26 +2565,6 @@ def test_cloud_memcache_transport_auth_adc(transport_class): ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudMemcacheGrpcTransport, - transports.CloudMemcacheGrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_cloud_memcache_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with( - scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - @pytest.mark.parametrize( "transport_class,grpc_helpers", [ From 3f48561f966669d45c072d1a4f273e07c4dd95bc Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 2 Nov 2021 15:06:13 +0000 Subject: [PATCH 069/159] chore: release 1.2.1 (#121) :robot: I have created a release \*beep\* \*boop\* --- ### [1.2.1](https://www.github.com/googleapis/python-memcache/compare/v1.2.0...v1.2.1) (2021-11-01) ### Bug Fixes * **deps:** drop packaging dependency ([5159fe9](https://www.github.com/googleapis/python-memcache/commit/5159fe99b200979b54ce76633a7b8cda87931eee)) * **deps:** require google-api-core >= 1.28.0 ([5159fe9](https://www.github.com/googleapis/python-memcache/commit/5159fe99b200979b54ce76633a7b8cda87931eee)) ### Documentation * list oneofs in docstring ([5159fe9](https://www.github.com/googleapis/python-memcache/commit/5159fe99b200979b54ce76633a7b8cda87931eee)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- CHANGELOG.md | 13 +++++++++++++ setup.py | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f5452d1..124c154 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,18 @@ # Changelog +### [1.2.1](https://www.github.com/googleapis/python-memcache/compare/v1.2.0...v1.2.1) (2021-11-01) + + +### Bug Fixes + +* **deps:** drop packaging dependency ([5159fe9](https://www.github.com/googleapis/python-memcache/commit/5159fe99b200979b54ce76633a7b8cda87931eee)) +* **deps:** require google-api-core >= 1.28.0 ([5159fe9](https://www.github.com/googleapis/python-memcache/commit/5159fe99b200979b54ce76633a7b8cda87931eee)) + + +### Documentation + +* list oneofs in docstring ([5159fe9](https://www.github.com/googleapis/python-memcache/commit/5159fe99b200979b54ce76633a7b8cda87931eee)) + ## [1.2.0](https://www.github.com/googleapis/python-memcache/compare/v1.1.3...v1.2.0) (2021-10-12) diff --git a/setup.py b/setup.py index 622d810..c23eb67 100644 --- a/setup.py +++ b/setup.py @@ -19,7 +19,7 @@ import os import setuptools # type: ignore -version = "1.2.0" +version = "1.2.1" package_root = os.path.abspath(os.path.dirname(__file__)) From a4344796e9ec60ea817f92436cabcaef0099207f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 9 Nov 2021 17:59:10 -0500 Subject: [PATCH 070/159] chore: use gapic-generator-python 0.56.2 (#124) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update Java and Python dependencies PiperOrigin-RevId: 408420890 Source-Link: https://github.com/googleapis/googleapis/commit/2921f9fb3bfbd16f6b2da0104373e2b47a80a65e Source-Link: https://github.com/googleapis/googleapis-gen/commit/6598ca8cbbf5226733a099c4506518a5af6ff74c Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNjU5OGNhOGNiYmY1MjI2NzMzYTA5OWM0NTA2NTE4YTVhZjZmZjc0YyJ9 * 🩉 Updates from OwlBot See 
https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../services/cloud_memcache/async_client.py | 13 +- .../services/cloud_memcache/client.py | 25 ++-- .../cloud_memcache/transports/base.py | 10 +- .../cloud_memcache/transports/grpc.py | 6 +- .../cloud_memcache/transports/grpc_asyncio.py | 6 +- .../services/cloud_memcache/async_client.py | 13 +- .../services/cloud_memcache/client.py | 25 ++-- .../cloud_memcache/transports/base.py | 10 +- .../cloud_memcache/transports/grpc.py | 6 +- .../cloud_memcache/transports/grpc_asyncio.py | 6 +- .../gapic/memcache_v1/test_cloud_memcache.py | 112 +++++++++++---- .../memcache_v1beta2/test_cloud_memcache.py | 136 +++++++++++++----- 12 files changed, 258 insertions(+), 110 deletions(-) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py index 599f1fc..f0590b8 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py @@ -19,14 +19,17 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, 
object] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py index 7339c03..f0f6dcb 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -14,23 +14,25 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore @@ -305,8 +307,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py index 31b196f..63c6c77 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py @@ -18,11 +18,11 @@ import pkg_resources import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.api_core import operations_v1 # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py index 69200f9..07e00f3 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py @@ -16,9 +16,9 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers # type: ignore -from google.api_core import operations_v1 # 
type: ignore -from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py index cf8fb59..7c447bb 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py @@ -16,9 +16,9 @@ import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore -from google.api_core import operations_v1 # type: ignore +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py index a1444b4..b9baa6b 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py @@ -19,14 +19,17 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from 
google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index cb200ff..722e2a7 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -14,23 +14,25 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore @@ -305,8 +307,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py index 2fb963f..b275f94 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py @@ -18,11 +18,11 @@ import pkg_resources import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.api_core import operations_v1 # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py index ebd9446..0538a36 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py @@ -16,9 +16,9 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers # type: ignore -from 
google.api_core import operations_v1 # type: ignore -from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py index a2c9d82..fbccaba 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py @@ -16,9 +16,9 @@ import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore -from google.api_core import operations_v1 # type: ignore +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py index a87e13a..cb1a6ea 100644 --- a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -639,7 +639,9 @@ def test_list_instances_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val def test_list_instances_flattened_error(): @@ -675,7 +677,9 @@ async def test_list_instances_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1015,7 +1019,9 @@ def test_get_instance_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_get_instance_flattened_error(): @@ -1051,7 +1057,9 @@ async def test_get_instance_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1219,9 +1227,15 @@ def test_create_instance_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].instance == cloud_memcache.Instance(name="name_value") - assert args[0].instance_id == "instance_id_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].instance + mock_val = cloud_memcache.Instance(name="name_value") + assert arg == mock_val + arg = args[0].instance_id + mock_val = "instance_id_value" + assert arg == mock_val def test_create_instance_flattened_error(): @@ -1264,9 +1278,15 @@ async def test_create_instance_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].instance == cloud_memcache.Instance(name="name_value") - assert args[0].instance_id == "instance_id_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].instance + mock_val = cloud_memcache.Instance(name="name_value") + assert arg == mock_val + arg = args[0].instance_id + mock_val = "instance_id_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1440,8 +1460,12 @@ def test_update_instance_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].instance == cloud_memcache.Instance(name="name_value") - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].instance + mock_val = cloud_memcache.Instance(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val def test_update_instance_flattened_error(): @@ -1482,8 +1506,12 @@ async def test_update_instance_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].instance == cloud_memcache.Instance(name="name_value") - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].instance + mock_val = cloud_memcache.Instance(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val @pytest.mark.asyncio @@ -1665,9 +1693,15 @@ def test_update_parameters_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) - assert args[0].parameters == cloud_memcache.MemcacheParameters(id="id_value") + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + arg = args[0].parameters + mock_val = cloud_memcache.MemcacheParameters(id="id_value") + assert arg == mock_val def test_update_parameters_flattened_error(): @@ -1712,9 +1746,15 @@ async def test_update_parameters_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) - assert args[0].parameters == cloud_memcache.MemcacheParameters(id="id_value") + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + arg = args[0].parameters + mock_val = cloud_memcache.MemcacheParameters(id="id_value") + assert arg == mock_val @pytest.mark.asyncio @@ -1881,7 +1921,9 @@ def test_delete_instance_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_delete_instance_flattened_error(): @@ -1917,7 +1959,9 @@ async def test_delete_instance_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -2083,9 +2127,15 @@ def test_apply_parameters_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].node_ids == ["node_ids_value"] - assert args[0].apply_all == True + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].node_ids + mock_val = ["node_ids_value"] + assert arg == mock_val + arg = args[0].apply_all + mock_val = True + assert arg == mock_val def test_apply_parameters_flattened_error(): @@ -2126,9 +2176,15 @@ async def test_apply_parameters_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].node_ids == ["node_ids_value"] - assert args[0].apply_all == True + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].node_ids + mock_val = ["node_ids_value"] + assert arg == mock_val + arg = args[0].apply_all + mock_val = True + assert arg == mock_val @pytest.mark.asyncio diff --git a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index df8400f..3157ec7 100644 --- a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -641,7 +641,9 @@ def test_list_instances_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val def test_list_instances_flattened_error(): @@ -677,7 +679,9 @@ async def test_list_instances_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1021,7 +1025,9 @@ def test_get_instance_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_get_instance_flattened_error(): @@ -1057,7 +1063,9 @@ async def test_get_instance_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1225,9 +1233,15 @@ def test_create_instance_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].instance_id == "instance_id_value" - assert args[0].resource == cloud_memcache.Instance(name="name_value") + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].instance_id + mock_val = "instance_id_value" + assert arg == mock_val + arg = args[0].resource + mock_val = cloud_memcache.Instance(name="name_value") + assert arg == mock_val def test_create_instance_flattened_error(): @@ -1270,9 +1284,15 @@ async def test_create_instance_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].instance_id == "instance_id_value" - assert args[0].resource == cloud_memcache.Instance(name="name_value") + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].instance_id + mock_val = "instance_id_value" + assert arg == mock_val + arg = args[0].resource + mock_val = cloud_memcache.Instance(name="name_value") + assert arg == mock_val @pytest.mark.asyncio @@ -1446,8 +1466,12 @@ def test_update_instance_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) - assert args[0].resource == cloud_memcache.Instance(name="name_value") + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + arg = args[0].resource + mock_val = cloud_memcache.Instance(name="name_value") + assert arg == mock_val def test_update_instance_flattened_error(): @@ -1488,8 +1512,12 @@ async def test_update_instance_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) - assert args[0].resource == cloud_memcache.Instance(name="name_value") + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + arg = args[0].resource + mock_val = cloud_memcache.Instance(name="name_value") + assert arg == mock_val @pytest.mark.asyncio @@ -1671,9 +1699,15 @@ def test_update_parameters_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) - assert args[0].parameters == cloud_memcache.MemcacheParameters(id="id_value") + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + arg = args[0].parameters + mock_val = cloud_memcache.MemcacheParameters(id="id_value") + assert arg == mock_val def test_update_parameters_flattened_error(): @@ -1718,9 +1752,15 @@ async def test_update_parameters_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) - assert args[0].parameters == cloud_memcache.MemcacheParameters(id="id_value") + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + arg = args[0].parameters + mock_val = cloud_memcache.MemcacheParameters(id="id_value") + assert arg == mock_val @pytest.mark.asyncio @@ -1887,7 +1927,9 @@ def test_delete_instance_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_delete_instance_flattened_error(): @@ -1923,7 +1965,9 @@ async def test_delete_instance_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -2089,9 +2133,15 @@ def test_apply_parameters_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].node_ids == ["node_ids_value"] - assert args[0].apply_all == True + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].node_ids + mock_val = ["node_ids_value"] + assert arg == mock_val + arg = args[0].apply_all + mock_val = True + assert arg == mock_val def test_apply_parameters_flattened_error(): @@ -2132,9 +2182,15 @@ async def test_apply_parameters_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].node_ids == ["node_ids_value"] - assert args[0].apply_all == True + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].node_ids + mock_val = ["node_ids_value"] + assert arg == mock_val + arg = args[0].apply_all + mock_val = True + assert arg == mock_val @pytest.mark.asyncio @@ -2316,9 +2372,15 @@ def test_apply_software_update_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].instance == "instance_value" - assert args[0].node_ids == ["node_ids_value"] - assert args[0].apply_all == True + arg = args[0].instance + mock_val = "instance_value" + assert arg == mock_val + arg = args[0].node_ids + mock_val = ["node_ids_value"] + assert arg == mock_val + arg = args[0].apply_all + mock_val = True + assert arg == mock_val def test_apply_software_update_flattened_error(): @@ -2361,9 +2423,15 @@ async def test_apply_software_update_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].instance == "instance_value" - assert args[0].node_ids == ["node_ids_value"] - assert args[0].apply_all == True + arg = args[0].instance + mock_val = "instance_value" + assert arg == mock_val + arg = args[0].node_ids + mock_val = ["node_ids_value"] + assert arg == mock_val + arg = args[0].apply_all + mock_val = True + assert arg == mock_val @pytest.mark.asyncio From 41450edf46f73ec854b5c5c074497483d30a1b24 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 11 Nov 2021 12:18:44 -0500 Subject: [PATCH 071/159] chore(python): add .github/CODEOWNERS as a templated file (#125) Source-Link: https://github.com/googleapis/synthtool/commit/c5026b3217973a8db55db8ee85feee0e9a65e295 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:0e18b9475fbeb12d9ad4302283171edebb6baf2dfca1bd215ee3b34ed79d95d7 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 2 +- .github/CODEOWNERS | 9 +++++---- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 108063d..7519fa3 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:4ee57a76a176ede9087c14330c625a71553cf9c72828b2c0ca12f5338171ba60 + digest: sha256:0e18b9475fbeb12d9ad4302283171edebb6baf2dfca1bd215ee3b34ed79d95d7 diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 30c3973..44cc868 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -3,9 +3,10 @@ # # For syntax help see: # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax +# Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json. 
-# The @googleapis/yoshi-python is the default owner for changes in this repo -* @googleapis/yoshi-python +# @googleapis/yoshi-python is the default owner for changes in this repo +* @googleapis/yoshi-python -# The python-samples-reviewers team is the default owner for samples changes -/samples/ @googleapis/python-samples-owners \ No newline at end of file +# @googleapis/python-samples-owners is the default owner for samples changes +/samples/ @googleapis/python-samples-owners From 55bb11578cdbe59ddfdc3078c510cc1bd36c9e19 Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Wed, 17 Nov 2021 17:15:38 -0500 Subject: [PATCH 072/159] chore: update doc links from googleapis.dev to cloud.google.com (#126) --- .repo-metadata.json | 2 +- README.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.repo-metadata.json b/.repo-metadata.json index 6fced9a..f0d455c 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -2,7 +2,7 @@ "name": "memcache", "name_pretty": "Cloud Memorystore for Memcached", "product_documentation": "cloud.google.com/memorystore/docs/memcached/", - "client_documentation": "https://googleapis.dev/python/memcache/latest", + "client_documentation": "https://cloud.google.com/python/docs/reference/memcache/latest", "issue_tracker": "", "release_level": "ga", "language": "python", diff --git a/README.rst b/README.rst index f643869..7ce5ce2 100644 --- a/README.rst +++ b/README.rst @@ -16,7 +16,7 @@ compatible with OSS Memcached protocol. .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-memcache.svg :target: https://pypi.org/project/google-cloud-memcache/ .. _Cloud Memorystore for Memached API: https://cloud.google.com/memorystore/docs/memcached/ -.. _Client Library Documentation: https://googleapis.dev/python/memcache/latest +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/memcache/latest .. 
_Product Documentation: https://cloud.google.com/memorystore/docs/memcached/ Quick Start From f651484922693f1f55a79135bbf8bae93e3a7125 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 28 Dec 2021 13:20:34 -0500 Subject: [PATCH 073/159] chore: update .repo-metadata.json (#129) --- .repo-metadata.json | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.repo-metadata.json b/.repo-metadata.json index f0d455c..56dbda8 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -4,12 +4,13 @@ "product_documentation": "cloud.google.com/memorystore/docs/memcached/", "client_documentation": "https://cloud.google.com/python/docs/reference/memcache/latest", "issue_tracker": "", - "release_level": "ga", + "release_level": "stable", "language": "python", "library_type": "GAPIC_AUTO", "repo": "googleapis/python-memcache", "distribution_name": "google-cloud-memcache", "api_id": "memcache.googleapis.com", "default_version": "v1", - "codeowner_team": "" + "codeowner_team": "", + "api_shortname": "memcache" } From f7b836904f698f395eaddfbcd971a0d35bbef23a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 6 Jan 2022 17:18:13 +0000 Subject: [PATCH 074/159] chore: use python-samples-reviewers (#131) --- .github/.OwlBot.lock.yaml | 2 +- .github/CODEOWNERS | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 7519fa3..f33299d 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:0e18b9475fbeb12d9ad4302283171edebb6baf2dfca1bd215ee3b34ed79d95d7 + digest: sha256:899d5d7cc340fa8ef9d8ae1a8cfba362c6898584f779e156f25ee828ba824610 diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 44cc868..e446644 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -8,5 +8,5 @@ # 
@googleapis/yoshi-python is the default owner for changes in this repo * @googleapis/yoshi-python -# @googleapis/python-samples-owners is the default owner for samples changes -/samples/ @googleapis/python-samples-owners +# @googleapis/python-samples-reviewers is the default owner for samples changes +/samples/ @googleapis/python-samples-reviewers From 5990c34f6a61ae7bb7c34db626e06d2df0f7f7ee Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 7 Jan 2022 19:41:41 -0500 Subject: [PATCH 075/159] chore: use gapic-generator-python 0.58.4 (#130) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: use gapic-generator-python 0.58.4 fix: provide appropriate mock values for message body fields committer: dovs PiperOrigin-RevId: 419025932 Source-Link: https://github.com/googleapis/googleapis/commit/73da6697f598f1ba30618924936a59f8e457ec89 Source-Link: https://github.com/googleapis/googleapis-gen/commit/46df624a54b9ed47c1a7eefb7a49413cf7b82f98 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDZkZjYyNGE1NGI5ZWQ0N2MxYTdlZWZiN2E0OTQxM2NmN2I4MmY5OCJ9 * 🩉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../cloud_memcache/transports/base.py | 1 - .../cloud_memcache/transports/base.py | 1 - .../gapic/memcache_v1/test_cloud_memcache.py | 91 ++++++---------- .../memcache_v1beta2/test_cloud_memcache.py | 102 ++++++------------ 4 files changed, 66 insertions(+), 129 deletions(-) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py index 63c6c77..de1431d 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py @@ -101,7 +101,6 @@ def __init__( credentials, 
_ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py index b275f94..ac4e951 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py @@ -101,7 +101,6 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id diff --git a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py index cb1a6ea..1355e9e 100644 --- a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -252,20 +252,20 @@ def test_cloud_memcache_client_client_options( # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -324,7 +324,7 @@ def test_cloud_memcache_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -419,7 +419,7 @@ def test_cloud_memcache_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -450,7 +450,7 @@ def test_cloud_memcache_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -483,9 +483,8 @@ def 
test_cloud_memcache_client_client_options_from_dict(): ) -def test_list_instances( - transport: str = "grpc", request_type=cloud_memcache.ListInstancesRequest -): +@pytest.mark.parametrize("request_type", [cloud_memcache.ListInstancesRequest, dict,]) +def test_list_instances(request_type, transport: str = "grpc"): client = CloudMemcacheClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -513,10 +512,6 @@ def test_list_instances( assert response.unreachable == ["unreachable_value"] -def test_list_instances_from_dict(): - test_list_instances(request_type=dict) - - def test_list_instances_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -696,8 +691,10 @@ async def test_list_instances_flattened_error_async(): ) -def test_list_instances_pager(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_instances_pager(transport_name: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -734,8 +731,10 @@ def test_list_instances_pager(): assert all(isinstance(i, cloud_memcache.Instance) for i in results) -def test_list_instances_pages(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_instances_pages(transport_name: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -834,9 +833,8 @@ async def test_list_instances_async_pages(): assert page_.raw_page.next_page_token == token -def test_get_instance( - transport: str = "grpc", request_type=cloud_memcache.GetInstanceRequest -): +@pytest.mark.parametrize("request_type", [cloud_memcache.GetInstanceRequest, dict,]) +def test_get_instance(request_type, transport: str = "grpc"): client = CloudMemcacheClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -879,10 +877,6 @@ def test_get_instance( assert response.discovery_endpoint == "discovery_endpoint_value" -def test_get_instance_from_dict(): - test_get_instance(request_type=dict) - - def test_get_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1076,9 +1070,8 @@ async def test_get_instance_flattened_error_async(): ) -def test_create_instance( - transport: str = "grpc", request_type=cloud_memcache.CreateInstanceRequest -): +@pytest.mark.parametrize("request_type", [cloud_memcache.CreateInstanceRequest, dict,]) +def test_create_instance(request_type, transport: str = "grpc"): client = CloudMemcacheClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1102,10 +1095,6 @@ def test_create_instance( assert isinstance(response, future.Future) -def test_create_instance_from_dict(): - test_create_instance(request_type=dict) - - def test_create_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -1306,9 +1295,8 @@ async def test_create_instance_flattened_error_async(): ) -def test_update_instance( - transport: str = "grpc", request_type=cloud_memcache.UpdateInstanceRequest -): +@pytest.mark.parametrize("request_type", [cloud_memcache.UpdateInstanceRequest, dict,]) +def test_update_instance(request_type, transport: str = "grpc"): client = CloudMemcacheClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1332,10 +1320,6 @@ def test_update_instance( assert isinstance(response, future.Future) -def test_update_instance_from_dict(): - test_update_instance(request_type=dict) - - def test_update_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1530,9 +1514,10 @@ async def test_update_instance_flattened_error_async(): ) -def test_update_parameters( - transport: str = "grpc", request_type=cloud_memcache.UpdateParametersRequest -): +@pytest.mark.parametrize( + "request_type", [cloud_memcache.UpdateParametersRequest, dict,] +) +def test_update_parameters(request_type, transport: str = "grpc"): client = CloudMemcacheClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1558,10 +1543,6 @@ def test_update_parameters( assert isinstance(response, future.Future) -def test_update_parameters_from_dict(): - test_update_parameters(request_type=dict) - - def test_update_parameters_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -1774,9 +1755,8 @@ async def test_update_parameters_flattened_error_async(): ) -def test_delete_instance( - transport: str = "grpc", request_type=cloud_memcache.DeleteInstanceRequest -): +@pytest.mark.parametrize("request_type", [cloud_memcache.DeleteInstanceRequest, dict,]) +def test_delete_instance(request_type, transport: str = "grpc"): client = CloudMemcacheClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1800,10 +1780,6 @@ def test_delete_instance( assert isinstance(response, future.Future) -def test_delete_instance_from_dict(): - test_delete_instance(request_type=dict) - - def test_delete_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1978,9 +1954,8 @@ async def test_delete_instance_flattened_error_async(): ) -def test_apply_parameters( - transport: str = "grpc", request_type=cloud_memcache.ApplyParametersRequest -): +@pytest.mark.parametrize("request_type", [cloud_memcache.ApplyParametersRequest, dict,]) +def test_apply_parameters(request_type, transport: str = "grpc"): client = CloudMemcacheClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2004,10 +1979,6 @@ def test_apply_parameters( assert isinstance(response, future.Future) -def test_apply_parameters_from_dict(): - test_apply_parameters(request_type=dict) - - def test_apply_parameters_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -2753,7 +2724,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( diff --git a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index 3157ec7..d827a71 100644 --- a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -254,20 +254,20 @@ def test_cloud_memcache_client_client_options( # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -326,7 +326,7 @@ def test_cloud_memcache_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -421,7 +421,7 @@ def test_cloud_memcache_client_client_options_scopes( options = 
client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -452,7 +452,7 @@ def test_cloud_memcache_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -485,9 +485,8 @@ def test_cloud_memcache_client_client_options_from_dict(): ) -def test_list_instances( - transport: str = "grpc", request_type=cloud_memcache.ListInstancesRequest -): +@pytest.mark.parametrize("request_type", [cloud_memcache.ListInstancesRequest, dict,]) +def test_list_instances(request_type, transport: str = "grpc"): client = CloudMemcacheClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -515,10 +514,6 @@ def test_list_instances( assert response.unreachable == ["unreachable_value"] -def test_list_instances_from_dict(): - test_list_instances(request_type=dict) - - def test_list_instances_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -698,8 +693,10 @@ async def test_list_instances_flattened_error_async(): ) -def test_list_instances_pager(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_instances_pager(transport_name: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -736,8 +733,10 @@ def test_list_instances_pager(): assert all(isinstance(i, cloud_memcache.Instance) for i in results) -def test_list_instances_pages(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_instances_pages(transport_name: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -836,9 +835,8 @@ async def test_list_instances_async_pages(): assert page_.raw_page.next_page_token == token -def test_get_instance( - transport: str = "grpc", request_type=cloud_memcache.GetInstanceRequest -): +@pytest.mark.parametrize("request_type", [cloud_memcache.GetInstanceRequest, dict,]) +def test_get_instance(request_type, transport: str = "grpc"): client = CloudMemcacheClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -883,10 +881,6 @@ def test_get_instance( assert response.update_available is True -def test_get_instance_from_dict(): - test_get_instance(request_type=dict) - - def test_get_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -1082,9 +1076,8 @@ async def test_get_instance_flattened_error_async(): ) -def test_create_instance( - transport: str = "grpc", request_type=cloud_memcache.CreateInstanceRequest -): +@pytest.mark.parametrize("request_type", [cloud_memcache.CreateInstanceRequest, dict,]) +def test_create_instance(request_type, transport: str = "grpc"): client = CloudMemcacheClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1108,10 +1101,6 @@ def test_create_instance( assert isinstance(response, future.Future) -def test_create_instance_from_dict(): - test_create_instance(request_type=dict) - - def test_create_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1312,9 +1301,8 @@ async def test_create_instance_flattened_error_async(): ) -def test_update_instance( - transport: str = "grpc", request_type=cloud_memcache.UpdateInstanceRequest -): +@pytest.mark.parametrize("request_type", [cloud_memcache.UpdateInstanceRequest, dict,]) +def test_update_instance(request_type, transport: str = "grpc"): client = CloudMemcacheClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1338,10 +1326,6 @@ def test_update_instance( assert isinstance(response, future.Future) -def test_update_instance_from_dict(): - test_update_instance(request_type=dict) - - def test_update_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -1536,9 +1520,10 @@ async def test_update_instance_flattened_error_async(): ) -def test_update_parameters( - transport: str = "grpc", request_type=cloud_memcache.UpdateParametersRequest -): +@pytest.mark.parametrize( + "request_type", [cloud_memcache.UpdateParametersRequest, dict,] +) +def test_update_parameters(request_type, transport: str = "grpc"): client = CloudMemcacheClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1564,10 +1549,6 @@ def test_update_parameters( assert isinstance(response, future.Future) -def test_update_parameters_from_dict(): - test_update_parameters(request_type=dict) - - def test_update_parameters_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1780,9 +1761,8 @@ async def test_update_parameters_flattened_error_async(): ) -def test_delete_instance( - transport: str = "grpc", request_type=cloud_memcache.DeleteInstanceRequest -): +@pytest.mark.parametrize("request_type", [cloud_memcache.DeleteInstanceRequest, dict,]) +def test_delete_instance(request_type, transport: str = "grpc"): client = CloudMemcacheClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1806,10 +1786,6 @@ def test_delete_instance( assert isinstance(response, future.Future) -def test_delete_instance_from_dict(): - test_delete_instance(request_type=dict) - - def test_delete_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -1984,9 +1960,8 @@ async def test_delete_instance_flattened_error_async(): ) -def test_apply_parameters( - transport: str = "grpc", request_type=cloud_memcache.ApplyParametersRequest -): +@pytest.mark.parametrize("request_type", [cloud_memcache.ApplyParametersRequest, dict,]) +def test_apply_parameters(request_type, transport: str = "grpc"): client = CloudMemcacheClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2010,10 +1985,6 @@ def test_apply_parameters( assert isinstance(response, future.Future) -def test_apply_parameters_from_dict(): - test_apply_parameters(request_type=dict) - - def test_apply_parameters_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2210,9 +2181,10 @@ async def test_apply_parameters_flattened_error_async(): ) -def test_apply_software_update( - transport: str = "grpc", request_type=cloud_memcache.ApplySoftwareUpdateRequest -): +@pytest.mark.parametrize( + "request_type", [cloud_memcache.ApplySoftwareUpdateRequest, dict,] +) +def test_apply_software_update(request_type, transport: str = "grpc"): client = CloudMemcacheClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2238,10 +2210,6 @@ def test_apply_software_update( assert isinstance(response, future.Future) -def test_apply_software_update_from_dict(): - test_apply_software_update(request_type=dict) - - def test_apply_software_update_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -3001,7 +2969,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( From 0dbb13cba5d42078f8ad475b1ee3c21ac47311d4 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 13 Jan 2022 15:50:11 +0000 Subject: [PATCH 076/159] build: switch to release-please for tagging (#132) --- .github/.OwlBot.lock.yaml | 2 +- .github/release-please.yml | 1 + .github/release-trigger.yml | 1 + 3 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 .github/release-trigger.yml diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index f33299d..ff5126c 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:899d5d7cc340fa8ef9d8ae1a8cfba362c6898584f779e156f25ee828ba824610 + digest: sha256:dfa9b663b32de8b5b327e32c1da665a80de48876558dd58091d8160c60ad7355 diff --git a/.github/release-please.yml b/.github/release-please.yml index 4507ad0..466597e 100644 --- a/.github/release-please.yml +++ b/.github/release-please.yml @@ -1 +1,2 @@ releaseType: python +handleGHRelease: true diff --git a/.github/release-trigger.yml b/.github/release-trigger.yml new file mode 100644 index 0000000..d4ca941 --- /dev/null +++ b/.github/release-trigger.yml @@ -0,0 +1 @@ +enabled: true From abd8388ac1594505fcd9963bdb53d6949d952f30 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 14 Jan 2022 11:57:45 -0500 Subject: [PATCH 077/159] chore(python): update release.sh to use keystore (#133) Source-Link: https://github.com/googleapis/synthtool/commit/69fda12e2994f0b595a397e8bb6e3e9f380524eb Post-Processor: 
gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:ae600f36b6bc972b368367b6f83a1d91ec2c82a4a116b383d67d547c56fe6de3 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 2 +- .kokoro/release.sh | 2 +- .kokoro/release/common.cfg | 12 +++++++++++- 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index ff5126c..eecb84c 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:dfa9b663b32de8b5b327e32c1da665a80de48876558dd58091d8160c60ad7355 + digest: sha256:ae600f36b6bc972b368367b6f83a1d91ec2c82a4a116b383d67d547c56fe6de3 diff --git a/.kokoro/release.sh b/.kokoro/release.sh index 01e25c7..5131ab8 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -26,7 +26,7 @@ python3 -m pip install --upgrade twine wheel setuptools export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. 
-TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token") +TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1") cd github/python-memcache python3 setup.py sdist bdist_wheel twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index 921b289..aea3116 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -23,8 +23,18 @@ env_vars: { value: "github/python-memcache/.kokoro/release.sh" } +# Fetch PyPI password +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google-cloud-pypi-token-keystore-1" + } + } +} + # Tokens needed to report release status back to GitHub env_vars: { key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token" + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" } From 5a9f20c34b198650b1b9af80b0aa5e725ce8d221 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 20 Jan 2022 08:52:30 -0500 Subject: [PATCH 078/159] ci(python): run lint / unit tests / docs as GH actions (#134) * ci(python): run lint / unit tests / docs as GH actions Source-Link: https://github.com/googleapis/synthtool/commit/57be0cdb0b94e1669cee0ca38d790de1dfdbcd44 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:ed1f9983d5a935a89fe8085e8bb97d94e41015252c5b6c9771257cf8624367e6 * add a commit to activate gh actions Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .github/.OwlBot.lock.yaml | 16 +++++++++- .github/workflows/docs.yml | 38 +++++++++++++++++++++++ .github/workflows/lint.yml | 25 +++++++++++++++ .github/workflows/unittest.yml | 57 
++++++++++++++++++++++++++++++++++ 4 files changed, 135 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/docs.yml create mode 100644 .github/workflows/lint.yml create mode 100644 .github/workflows/unittest.yml diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index eecb84c..b668c04 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,17 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ae600f36b6bc972b368367b6f83a1d91ec2c82a4a116b383d67d547c56fe6de3 + digest: sha256:ed1f9983d5a935a89fe8085e8bb97d94e41015252c5b6c9771257cf8624367e6 + diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 0000000..f7b8344 --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,38 @@ +on: + pull_request: + branches: + - main +name: docs +jobs: + docs: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run docs + run: | + nox -s docs + docfx: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install nox + 
run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run docfx + run: | + nox -s docfx diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 0000000..1e8b05c --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,25 @@ +on: + pull_request: + branches: + - main +name: lint +jobs: + lint: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run lint + run: | + nox -s lint + - name: Run lint_setup_py + run: | + nox -s lint_setup_py diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml new file mode 100644 index 0000000..074ee25 --- /dev/null +++ b/.github/workflows/unittest.yml @@ -0,0 +1,57 @@ +on: + pull_request: + branches: + - main +name: unittest +jobs: + unit: + runs-on: ubuntu-latest + strategy: + matrix: + python: ['3.6', '3.7', '3.8', '3.9', '3.10'] + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python }} + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run unit tests + env: + COVERAGE_FILE: .coverage-${{ matrix.python }} + run: | + nox -s unit-${{ matrix.python }} + - name: Upload coverage results + uses: actions/upload-artifact@v2 + with: + name: coverage-artifacts + path: .coverage-${{ matrix.python }} + + cover: + runs-on: ubuntu-latest + needs: + - unit + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install coverage + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install coverage + - name: 
Download coverage results + uses: actions/download-artifact@v2 + with: + name: coverage-artifacts + path: .coverage-results/ + - name: Report coverage results + run: | + coverage combine .coverage-results/.coverage* + coverage report --show-missing --fail-under=100 From ef5104e0922d980c0023b65665f29f27c14cddcc Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 25 Jan 2022 10:13:42 -0500 Subject: [PATCH 079/159] feat: add api key support (#135) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: upgrade gapic-generator-java, gax-java and gapic-generator-python PiperOrigin-RevId: 423842556 Source-Link: https://github.com/googleapis/googleapis/commit/a616ca08f4b1416abbac7bc5dd6d61c791756a81 Source-Link: https://github.com/googleapis/googleapis-gen/commit/29b938c58c1e51d019f2ee539d55dc0a3c86a905 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMjliOTM4YzU4YzFlNTFkMDE5ZjJlZTUzOWQ1NWRjMGEzYzg2YTkwNSJ9 * 🩉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../services/cloud_memcache/async_client.py | 38 +++++- .../services/cloud_memcache/client.py | 127 +++++++++++------ .../services/cloud_memcache/async_client.py | 38 +++++- .../services/cloud_memcache/client.py | 127 +++++++++++------ .../gapic/memcache_v1/test_cloud_memcache.py | 128 ++++++++++++++++++ .../memcache_v1beta2/test_cloud_memcache.py | 128 ++++++++++++++++++ 6 files changed, 498 insertions(+), 88 deletions(-) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py index f0590b8..287f095 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from 
typing import Dict, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -130,6 +130,42 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + return CloudMemcacheClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + @property def transport(self) -> CloudMemcacheTransport: """Returns the transport used by the client instance. diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py index f0f6dcb..d1b4d05 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -256,6 +256,73 @@ def parse_common_location_path(path: str) -> Dict[str, str]: m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + def __init__( self, *, @@ -306,57 +373,22 @@ def __init__( if client_options is None: client_options = client_options_lib.ClientOptions() - # Create SSL credentials for mutual TLS if needed. 
- if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( - "true", - "false", - ): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - use_client_cert = ( - os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options ) - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, CloudMemcacheTransport): # transport is a CloudMemcacheTransport instance. 
- if credentials or client_options.credentials_file: + if credentials or client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." @@ -368,6 +400,15 @@ def __init__( ) self._transport = transport else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py index b9baa6b..996a928 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -130,6 +130,42 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. 
+ + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return CloudMemcacheClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + @property def transport(self) -> CloudMemcacheTransport: """Returns the transport used by the client instance. diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index 722e2a7..f370ef7 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -256,6 +256,73 @@ def parse_common_location_path(path: str) -> Dict[str, str]: m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. 
+ client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + def __init__( self, *, @@ -306,57 +373,22 @@ def __init__( if client_options is None: client_options = client_options_lib.ClientOptions() - # Create SSL credentials for mutual TLS if needed. - if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( - "true", - "false", - ): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - use_client_cert = ( - os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options ) - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. 
- if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, CloudMemcacheTransport): # transport is a CloudMemcacheTransport instance. - if credentials or client_options.credentials_file: + if credentials or client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." 
@@ -368,6 +400,15 @@ def __init__( ) self._transport = transport else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, diff --git a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py index 1355e9e..5c6f456 100644 --- a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -401,6 +401,87 @@ def test_cloud_memcache_client_mtls_env_auto( ) +@pytest.mark.parametrize( + "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient] +) +@mock.patch.object( + CloudMemcacheClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudMemcacheClient), +) +@mock.patch.object( + CloudMemcacheAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudMemcacheAsyncClient), +) +def test_cloud_memcache_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ @@ -2195,6 +2276,23 @@ def test_credentials_transport_error(): transport=transport, ) + # It is an error to provide an api_key and a transport instance. + transport = transports.CloudMemcacheGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudMemcacheClient(client_options=options, transport=transport,) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudMemcacheClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + # It is an error to provide scopes and a transport instance. 
transport = transports.CloudMemcacheGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -2789,3 +2887,33 @@ def test_client_ctx(): with client: pass close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (CloudMemcacheClient, transports.CloudMemcacheGrpcTransport), + (CloudMemcacheAsyncClient, transports.CloudMemcacheGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) diff --git a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index d827a71..8c0eba4 100644 --- a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -403,6 +403,87 @@ def test_cloud_memcache_client_mtls_env_auto( ) +@pytest.mark.parametrize( + "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient] +) +@mock.patch.object( + CloudMemcacheClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudMemcacheClient), +) +@mock.patch.object( + CloudMemcacheAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudMemcacheAsyncClient), +) +def test_cloud_memcache_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case 
GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ @@ -2439,6 +2520,23 @@ def test_credentials_transport_error(): transport=transport, ) + # It is an error to provide an api_key and a transport instance. + transport = transports.CloudMemcacheGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudMemcacheClient(client_options=options, transport=transport,) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudMemcacheClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + # It is an error to provide scopes and a transport instance. 
transport = transports.CloudMemcacheGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -3034,3 +3132,33 @@ def test_client_ctx(): with client: pass close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (CloudMemcacheClient, transports.CloudMemcacheGrpcTransport), + (CloudMemcacheAsyncClient, transports.CloudMemcacheGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) From 5f8a2b4fe5fcc0c4a2be6b9f8529f4ceacbf6421 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 3 Feb 2022 14:38:49 +0000 Subject: [PATCH 080/159] chore: use gapic-generator-python 0.62.1 (#138) - [ ] Regenerate this pull request now. 
fix: resolve DuplicateCredentialArgs error when using credentials_file committer: parthea PiperOrigin-RevId: 425964861 Source-Link: https://github.com/googleapis/googleapis/commit/84b1a5a4f6fb2d04905be58e586b8a7a4310a8cf Source-Link: https://github.com/googleapis/googleapis-gen/commit/4fb761bbd8506ac156f49bac5f18306aa8eb3aa8 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNGZiNzYxYmJkODUwNmFjMTU2ZjQ5YmFjNWYxODMwNmFhOGViM2FhOCJ9 --- .../services/cloud_memcache/async_client.py | 14 ++-- .../services/cloud_memcache/client.py | 14 ++-- .../cloud_memcache/transports/grpc.py | 7 +- .../cloud_memcache/transports/grpc_asyncio.py | 7 +- .../cloud/memcache_v1/types/cloud_memcache.py | 11 ++- .../services/cloud_memcache/async_client.py | 16 ++-- .../services/cloud_memcache/client.py | 16 ++-- .../cloud_memcache/transports/grpc.py | 7 +- .../cloud_memcache/transports/grpc_asyncio.py | 7 +- .../memcache_v1beta2/types/cloud_memcache.py | 7 +- .../gapic/memcache_v1/test_cloud_memcache.py | 80 ++++++++++++++++++- .../memcache_v1beta2/test_cloud_memcache.py | 80 ++++++++++++++++++- 12 files changed, 212 insertions(+), 54 deletions(-) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py index 287f095..15f0e0e 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py @@ -266,7 +266,7 @@ async def list_instances( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -343,7 +343,7 @@ async def get_instance( """ # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -442,7 +442,7 @@ async def create_instance( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, instance, instance_id]) if request is not None and has_flattened_params: @@ -537,7 +537,7 @@ async def update_instance( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([instance, update_mask]) if request is not None and has_flattened_params: @@ -640,7 +640,7 @@ async def update_parameters( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, update_mask, parameters]) if request is not None and has_flattened_params: @@ -738,7 +738,7 @@ async def delete_instance( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -842,7 +842,7 @@ async def apply_parameters( """ # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, node_ids, apply_all]) if request is not None and has_flattened_params: diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py index d1b4d05..db60675 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -461,7 +461,7 @@ def list_instances( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -538,7 +538,7 @@ def get_instance( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -637,7 +637,7 @@ def create_instance( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, instance, instance_id]) if request is not None and has_flattened_params: @@ -732,7 +732,7 @@ def update_instance( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([instance, update_mask]) if request is not None and has_flattened_params: @@ -835,7 +835,7 @@ def update_parameters( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, update_mask, parameters]) if request is not None and has_flattened_params: @@ -933,7 +933,7 @@ def delete_instance( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -1037,7 +1037,7 @@ def apply_parameters( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, node_ids, apply_all]) if request is not None and has_flattened_params: diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py index 07e00f3..531a3a5 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py @@ -179,8 +179,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, @@ -253,7 +256,7 @@ def operations_client(self) -> operations_v1.OperationsClient: This property caches on the instance; repeated calls return the same client. """ - # Sanity check: Only create a new client if we do not already have one. + # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: self._operations_client = operations_v1.OperationsClient(self.grpc_channel) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py index 7c447bb..d23dfaf 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py @@ -224,8 +224,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, @@ -255,7 +258,7 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: This property caches on the instance; repeated calls return the same client. """ - # Sanity check: Only create a new client if we do not already have one. + # Quick check: Only create a new client if we do not already have one. 
if self._operations_client is None: self._operations_client = operations_v1.OperationsAsyncClient( self.grpc_channel diff --git a/google/cloud/memcache_v1/types/cloud_memcache.py b/google/cloud/memcache_v1/types/cloud_memcache.py index ad22da1..065ae9c 100644 --- a/google/cloud/memcache_v1/types/cloud_memcache.py +++ b/google/cloud/memcache_v1/types/cloud_memcache.py @@ -66,8 +66,7 @@ class Instance(proto.Message): Resource labels to represent user-provided metadata. Refer to cloud documentation on labels for more details. - https://cloud.google.com/compute/docs/labeling- - resources + https://cloud.google.com/compute/docs/labeling-resources authorized_network (str): The full name of the Google Compute Engine `network `__ @@ -188,8 +187,8 @@ class InstanceMessage(proto.Message): Attributes: code (google.cloud.memcache_v1.types.Instance.InstanceMessage.Code): - A code that correspond to one type of user- - acing message. + A code that correspond to one type of + user-facing message. message (str): Message on memcached instance which will be exposed to users. @@ -382,8 +381,8 @@ class ApplyParametersRequest(proto.Message): instance for which parameter group updates should be applied. node_ids (Sequence[str]): - Nodes to which we should apply the instance- - evel parameter group. + Nodes to which we should apply the + instance-level parameter group. apply_all (bool): Whether to apply instance-level parameter group to all nodes. If set to true, will diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py index 996a928..568409d 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py @@ -266,7 +266,7 @@ async def list_instances( """ # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -343,7 +343,7 @@ async def get_instance( A Memorystore for Memcached instance """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -443,7 +443,7 @@ async def create_instance( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, instance_id, resource]) if request is not None and has_flattened_params: @@ -539,7 +539,7 @@ async def update_instance( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([update_mask, resource]) if request is not None and has_flattened_params: @@ -643,7 +643,7 @@ async def update_parameters( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, update_mask, parameters]) if request is not None and has_flattened_params: @@ -741,7 +741,7 @@ async def delete_instance( """ # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -844,7 +844,7 @@ async def apply_parameters( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, node_ids, apply_all]) if request is not None and has_flattened_params: @@ -953,7 +953,7 @@ async def apply_software_update( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([instance, node_ids, apply_all]) if request is not None and has_flattened_params: diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index f370ef7..fa4113d 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -461,7 +461,7 @@ def list_instances( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -538,7 +538,7 @@ def get_instance( A Memorystore for Memcached instance """ # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -638,7 +638,7 @@ def create_instance( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, instance_id, resource]) if request is not None and has_flattened_params: @@ -734,7 +734,7 @@ def update_instance( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([update_mask, resource]) if request is not None and has_flattened_params: @@ -838,7 +838,7 @@ def update_parameters( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, update_mask, parameters]) if request is not None and has_flattened_params: @@ -936,7 +936,7 @@ def delete_instance( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -1039,7 +1039,7 @@ def apply_parameters( """ # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, node_ids, apply_all]) if request is not None and has_flattened_params: @@ -1148,7 +1148,7 @@ def apply_software_update( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([instance, node_ids, apply_all]) if request is not None and has_flattened_params: diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py index 0538a36..b12ba06 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py @@ -179,8 +179,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, @@ -253,7 +256,7 @@ def operations_client(self) -> operations_v1.OperationsClient: This property caches on the instance; repeated calls return the same client. """ - # Sanity check: Only create a new client if we do not already have one. + # Quick check: Only create a new client if we do not already have one. 
if self._operations_client is None: self._operations_client = operations_v1.OperationsClient(self.grpc_channel) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py index fbccaba..d81422c 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py @@ -224,8 +224,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, @@ -255,7 +258,7 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: This property caches on the instance; repeated calls return the same client. """ - # Sanity check: Only create a new client if we do not already have one. + # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: self._operations_client = operations_v1.OperationsAsyncClient( self.grpc_channel diff --git a/google/cloud/memcache_v1beta2/types/cloud_memcache.py b/google/cloud/memcache_v1beta2/types/cloud_memcache.py index 0ae9e80..31be119 100644 --- a/google/cloud/memcache_v1beta2/types/cloud_memcache.py +++ b/google/cloud/memcache_v1beta2/types/cloud_memcache.py @@ -71,8 +71,7 @@ class Instance(proto.Message): Resource labels to represent user-provided metadata. Refer to cloud documentation on labels for more details. 
- https://cloud.google.com/compute/docs/labeling- - resources + https://cloud.google.com/compute/docs/labeling-resources authorized_network (str): The full name of the Google Compute Engine `network `__ to which @@ -201,8 +200,8 @@ class InstanceMessage(proto.Message): Attributes: code (google.cloud.memcache_v1beta2.types.Instance.InstanceMessage.Code): - A code that correspond to one type of user- - acing message. + A code that correspond to one type of + user-facing message. message (str): Message on memcached instance which will be exposed to users. diff --git a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py index 5c6f456..9be0c84 100644 --- a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -29,6 +29,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import operation from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 from google.api_core import path_template @@ -514,21 +515,28 @@ def test_cloud_memcache_client_client_options_scopes( @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "client_class,transport_class,transport_name,grpc_helpers", [ - (CloudMemcacheClient, transports.CloudMemcacheGrpcTransport, "grpc"), + ( + CloudMemcacheClient, + transports.CloudMemcacheGrpcTransport, + "grpc", + grpc_helpers, + ), ( CloudMemcacheAsyncClient, transports.CloudMemcacheGrpcAsyncIOTransport, "grpc_asyncio", + grpc_helpers_async, ), ], ) def test_cloud_memcache_client_client_options_credentials_file( - client_class, transport_class, transport_name + client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. 
options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -564,6 +572,72 @@ def test_cloud_memcache_client_client_options_from_dict(): ) +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + CloudMemcacheClient, + transports.CloudMemcacheGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + CloudMemcacheAsyncClient, + transports.CloudMemcacheGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_cloud_memcache_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "memcache.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="memcache.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize("request_type", [cloud_memcache.ListInstancesRequest, dict,]) def test_list_instances(request_type, transport: str = "grpc"): client = CloudMemcacheClient( diff --git a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index 8c0eba4..cdafb55 100644 --- a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -29,6 +29,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import operation from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 from google.api_core import path_template @@ -516,21 +517,28 @@ def test_cloud_memcache_client_client_options_scopes( @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "client_class,transport_class,transport_name,grpc_helpers", [ - (CloudMemcacheClient, transports.CloudMemcacheGrpcTransport, "grpc"), + ( + 
CloudMemcacheClient, + transports.CloudMemcacheGrpcTransport, + "grpc", + grpc_helpers, + ), ( CloudMemcacheAsyncClient, transports.CloudMemcacheGrpcAsyncIOTransport, "grpc_asyncio", + grpc_helpers_async, ), ], ) def test_cloud_memcache_client_client_options_credentials_file( - client_class, transport_class, transport_name + client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -566,6 +574,72 @@ def test_cloud_memcache_client_client_options_from_dict(): ) +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + CloudMemcacheClient, + transports.CloudMemcacheGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + CloudMemcacheAsyncClient, + transports.CloudMemcacheGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_cloud_memcache_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "memcache.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="memcache.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize("request_type", [cloud_memcache.ListInstancesRequest, dict,]) def test_list_instances(request_type, transport: str = "grpc"): client = CloudMemcacheClient( From 2788bb4746e197bacd3334c8efa541f6f9bee834 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 11 Feb 2022 14:34:13 -0700 Subject: [PATCH 081/159] chore: use gapic-generator-python 0.63.2 (#140) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: use gapic-generator-python 0.63.2 docs: add generated snippets PiperOrigin-RevId: 427792504 Source-Link: https://github.com/googleapis/googleapis/commit/55b9e1e0b3106c850d13958352bc0751147b6b15 Source-Link: https://github.com/googleapis/googleapis-gen/commit/bf4e86b753f42cb0edb1fd51fbe840d7da0a1cde Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYmY0ZTg2Yjc1M2Y0MmNiMGVkYjFmZDUxZmJlODQwZDdkYTBhMWNkZSJ9 * 🩉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl 
Bot --- .../services/cloud_memcache/async_client.py | 171 +++++ .../services/cloud_memcache/client.py | 178 +++++ .../services/cloud_memcache/async_client.py | 195 +++++ .../services/cloud_memcache/client.py | 203 +++++ ...1_cloud_memcache_apply_parameters_async.py | 49 ++ ...v1_cloud_memcache_apply_parameters_sync.py | 49 ++ ...v1_cloud_memcache_create_instance_async.py | 57 ++ ..._v1_cloud_memcache_create_instance_sync.py | 57 ++ ...v1_cloud_memcache_delete_instance_async.py | 49 ++ ..._v1_cloud_memcache_delete_instance_sync.py | 49 ++ ...he_v1_cloud_memcache_get_instance_async.py | 45 ++ ...che_v1_cloud_memcache_get_instance_sync.py | 45 ++ ..._v1_cloud_memcache_list_instances_async.py | 46 ++ ...e_v1_cloud_memcache_list_instances_sync.py | 46 ++ ...v1_cloud_memcache_update_instance_async.py | 55 ++ ..._v1_cloud_memcache_update_instance_sync.py | 55 ++ ..._cloud_memcache_update_parameters_async.py | 49 ++ ...1_cloud_memcache_update_parameters_sync.py | 49 ++ ...2_cloud_memcache_apply_parameters_async.py | 49 ++ ...a2_cloud_memcache_apply_parameters_sync.py | 49 ++ ...ud_memcache_apply_software_update_async.py | 49 ++ ...oud_memcache_apply_software_update_sync.py | 49 ++ ...a2_cloud_memcache_create_instance_async.py | 57 ++ ...ta2_cloud_memcache_create_instance_sync.py | 57 ++ ...a2_cloud_memcache_delete_instance_async.py | 49 ++ ...ta2_cloud_memcache_delete_instance_sync.py | 49 ++ ...beta2_cloud_memcache_get_instance_async.py | 45 ++ ...1beta2_cloud_memcache_get_instance_sync.py | 45 ++ ...ta2_cloud_memcache_list_instances_async.py | 46 ++ ...eta2_cloud_memcache_list_instances_sync.py | 46 ++ ...a2_cloud_memcache_update_instance_async.py | 55 ++ ...ta2_cloud_memcache_update_instance_sync.py | 55 ++ ..._cloud_memcache_update_parameters_async.py | 49 ++ ...2_cloud_memcache_update_parameters_sync.py | 49 ++ .../snippet_metadata_memcache_v1.json | 627 +++++++++++++++ .../snippet_metadata_memcache_v1beta2.json | 716 ++++++++++++++++++ 36 files changed, 3588 
insertions(+) create mode 100644 samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_apply_parameters_async.py create mode 100644 samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_apply_parameters_sync.py create mode 100644 samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_create_instance_async.py create mode 100644 samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_create_instance_sync.py create mode 100644 samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_delete_instance_async.py create mode 100644 samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_delete_instance_sync.py create mode 100644 samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_get_instance_async.py create mode 100644 samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_get_instance_sync.py create mode 100644 samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_list_instances_async.py create mode 100644 samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_list_instances_sync.py create mode 100644 samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_update_instance_async.py create mode 100644 samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_update_instance_sync.py create mode 100644 samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_update_parameters_async.py create mode 100644 samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_update_parameters_sync.py create mode 100644 samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_apply_parameters_async.py create mode 100644 samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_apply_parameters_sync.py create mode 100644 samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_apply_software_update_async.py 
create mode 100644 samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_apply_software_update_sync.py create mode 100644 samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_create_instance_async.py create mode 100644 samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_create_instance_sync.py create mode 100644 samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_delete_instance_async.py create mode 100644 samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_delete_instance_sync.py create mode 100644 samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_get_instance_async.py create mode 100644 samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_get_instance_sync.py create mode 100644 samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_list_instances_async.py create mode 100644 samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_list_instances_sync.py create mode 100644 samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_update_instance_async.py create mode 100644 samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_update_instance_sync.py create mode 100644 samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_update_parameters_async.py create mode 100644 samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_update_parameters_sync.py create mode 100644 samples/generated_samples/snippet_metadata_memcache_v1.json create mode 100644 samples/generated_samples/snippet_metadata_memcache_v1beta2.json diff --git a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py index 15f0e0e..f8699da 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py +++ 
b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py @@ -237,6 +237,26 @@ async def list_instances( ) -> pagers.ListInstancesAsyncPager: r"""Lists Instances in a given location. + .. code-block:: + + from google.cloud import memcache_v1 + + def sample_list_instances(): + # Create a client + client = memcache_v1.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.memcache_v1.types.ListInstancesRequest, dict]): The request object. Request for @@ -319,6 +339,25 @@ async def get_instance( ) -> cloud_memcache.Instance: r"""Gets details of a single Instance. + .. code-block:: + + from google.cloud import memcache_v1 + + def sample_get_instance(): + # Create a client + client = memcache_v1.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.memcache_v1.types.GetInstanceRequest, dict]): The request object. Request for @@ -392,6 +431,37 @@ async def create_instance( ) -> operation_async.AsyncOperation: r"""Creates a new Instance in a given location. + .. 
code-block:: + + from google.cloud import memcache_v1 + + def sample_create_instance(): + # Create a client + client = memcache_v1.CloudMemcacheClient() + + # Initialize request argument(s) + instance = memcache_v1.Instance() + instance.name = "name_value" + instance.node_count = 1070 + instance.node_config.cpu_count = 976 + instance.node_config.memory_size_mb = 1505 + + request = memcache_v1.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.memcache_v1.types.CreateInstanceRequest, dict]): The request object. Request for @@ -503,6 +573,36 @@ async def update_instance( r"""Updates an existing Instance in a given project and location. + + .. code-block:: + + from google.cloud import memcache_v1 + + def sample_update_instance(): + # Create a client + client = memcache_v1.CloudMemcacheClient() + + # Initialize request argument(s) + instance = memcache_v1.Instance() + instance.name = "name_value" + instance.node_count = 1070 + instance.node_config.cpu_count = 976 + instance.node_config.memory_size_mb = 1505 + + request = memcache_v1.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.memcache_v1.types.UpdateInstanceRequest, dict]): The request object. Request for @@ -601,6 +701,30 @@ async def update_parameters( parameters, it must be followed by ApplyParameters to apply the parameters to nodes of the Memcached Instance. + + .. 
code-block:: + + from google.cloud import memcache_v1 + + def sample_update_parameters(): + # Create a client + client = memcache_v1.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1.UpdateParametersRequest( + name="name_value", + ) + + # Make the request + operation = client.update_parameters(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.memcache_v1.types.UpdateParametersRequest, dict]): The request object. Request for @@ -699,6 +823,29 @@ async def delete_instance( ) -> operation_async.AsyncOperation: r"""Deletes a single Instance. + .. code-block:: + + from google.cloud import memcache_v1 + + def sample_delete_instance(): + # Create a client + client = memcache_v1.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.memcache_v1.types.DeleteInstanceRequest, dict]): The request object. Request for @@ -797,6 +944,30 @@ async def apply_parameters( nodes in order to update them to the current set of parameters for the Memcached Instance. + + .. 
code-block:: + + from google.cloud import memcache_v1 + + def sample_apply_parameters(): + # Create a client + client = memcache_v1.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1.ApplyParametersRequest( + name="name_value", + ) + + # Make the request + operation = client.apply_parameters(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.memcache_v1.types.ApplyParametersRequest, dict]): The request object. Request for diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py index db60675..c0129a3 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -432,6 +432,27 @@ def list_instances( ) -> pagers.ListInstancesPager: r"""Lists Instances in a given location. + + .. code-block:: + + from google.cloud import memcache_v1 + + def sample_list_instances(): + # Create a client + client = memcache_v1.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.memcache_v1.types.ListInstancesRequest, dict]): The request object. Request for @@ -514,6 +535,26 @@ def get_instance( ) -> cloud_memcache.Instance: r"""Gets details of a single Instance. + + .. 
code-block:: + + from google.cloud import memcache_v1 + + def sample_get_instance(): + # Create a client + client = memcache_v1.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.memcache_v1.types.GetInstanceRequest, dict]): The request object. Request for @@ -587,6 +628,38 @@ def create_instance( ) -> operation.Operation: r"""Creates a new Instance in a given location. + + .. code-block:: + + from google.cloud import memcache_v1 + + def sample_create_instance(): + # Create a client + client = memcache_v1.CloudMemcacheClient() + + # Initialize request argument(s) + instance = memcache_v1.Instance() + instance.name = "name_value" + instance.node_count = 1070 + instance.node_config.cpu_count = 976 + instance.node_config.memory_size_mb = 1505 + + request = memcache_v1.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.memcache_v1.types.CreateInstanceRequest, dict]): The request object. Request for @@ -698,6 +771,37 @@ def update_instance( r"""Updates an existing Instance in a given project and location. + + + .. 
code-block:: + + from google.cloud import memcache_v1 + + def sample_update_instance(): + # Create a client + client = memcache_v1.CloudMemcacheClient() + + # Initialize request argument(s) + instance = memcache_v1.Instance() + instance.name = "name_value" + instance.node_count = 1070 + instance.node_config.cpu_count = 976 + instance.node_config.memory_size_mb = 1505 + + request = memcache_v1.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.memcache_v1.types.UpdateInstanceRequest, dict]): The request object. Request for @@ -796,6 +900,31 @@ def update_parameters( parameters, it must be followed by ApplyParameters to apply the parameters to nodes of the Memcached Instance. + + + .. code-block:: + + from google.cloud import memcache_v1 + + def sample_update_parameters(): + # Create a client + client = memcache_v1.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1.UpdateParametersRequest( + name="name_value", + ) + + # Make the request + operation = client.update_parameters(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.memcache_v1.types.UpdateParametersRequest, dict]): The request object. Request for @@ -894,6 +1023,30 @@ def delete_instance( ) -> operation.Operation: r"""Deletes a single Instance. + + .. 
code-block:: + + from google.cloud import memcache_v1 + + def sample_delete_instance(): + # Create a client + client = memcache_v1.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.memcache_v1.types.DeleteInstanceRequest, dict]): The request object. Request for @@ -992,6 +1145,31 @@ def apply_parameters( nodes in order to update them to the current set of parameters for the Memcached Instance. + + + .. code-block:: + + from google.cloud import memcache_v1 + + def sample_apply_parameters(): + # Create a client + client = memcache_v1.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1.ApplyParametersRequest( + name="name_value", + ) + + # Make the request + operation = client.apply_parameters(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.memcache_v1.types.ApplyParametersRequest, dict]): The request object. Request for diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py index 568409d..34b8605 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py @@ -237,6 +237,26 @@ async def list_instances( ) -> pagers.ListInstancesAsyncPager: r"""Lists Instances in a given location. + .. 
code-block:: + + from google.cloud import memcache_v1beta2 + + def sample_list_instances(): + # Create a client + client = memcache_v1beta2.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1beta2.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.memcache_v1beta2.types.ListInstancesRequest, dict]): The request object. Request for @@ -319,6 +339,25 @@ async def get_instance( ) -> cloud_memcache.Instance: r"""Gets details of a single Instance. + .. code-block:: + + from google.cloud import memcache_v1beta2 + + def sample_get_instance(): + # Create a client + client = memcache_v1beta2.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1beta2.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.memcache_v1beta2.types.GetInstanceRequest, dict]): The request object. Request for @@ -392,6 +431,37 @@ async def create_instance( ) -> operation_async.AsyncOperation: r"""Creates a new Instance in a given location. + .. 
code-block:: + + from google.cloud import memcache_v1beta2 + + def sample_create_instance(): + # Create a client + client = memcache_v1beta2.CloudMemcacheClient() + + # Initialize request argument(s) + resource = memcache_v1beta2.Instance() + resource.name = "name_value" + resource.node_count = 1070 + resource.node_config.cpu_count = 976 + resource.node_config.memory_size_mb = 1505 + + request = memcache_v1beta2.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + resource=resource, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.memcache_v1beta2.types.CreateInstanceRequest, dict]): The request object. Request for @@ -504,6 +574,36 @@ async def update_instance( r"""Updates an existing Instance in a given project and location. + + .. code-block:: + + from google.cloud import memcache_v1beta2 + + def sample_update_instance(): + # Create a client + client = memcache_v1beta2.CloudMemcacheClient() + + # Initialize request argument(s) + resource = memcache_v1beta2.Instance() + resource.name = "name_value" + resource.node_count = 1070 + resource.node_config.cpu_count = 976 + resource.node_config.memory_size_mb = 1505 + + request = memcache_v1beta2.UpdateInstanceRequest( + resource=resource, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.memcache_v1beta2.types.UpdateInstanceRequest, dict]): The request object. Request for @@ -603,6 +703,30 @@ async def update_parameters( followed by ``ApplyParameters`` to apply the parameters to nodes of the Memcached instance. + + .. 
code-block:: + + from google.cloud import memcache_v1beta2 + + def sample_update_parameters(): + # Create a client + client = memcache_v1beta2.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1beta2.UpdateParametersRequest( + name="name_value", + ) + + # Make the request + operation = client.update_parameters(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.memcache_v1beta2.types.UpdateParametersRequest, dict]): The request object. Request for @@ -702,6 +826,29 @@ async def delete_instance( ) -> operation_async.AsyncOperation: r"""Deletes a single Instance. + .. code-block:: + + from google.cloud import memcache_v1beta2 + + def sample_delete_instance(): + # Create a client + client = memcache_v1beta2.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1beta2.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.memcache_v1beta2.types.DeleteInstanceRequest, dict]): The request object. Request for @@ -800,6 +947,30 @@ async def apply_parameters( to update them to the current set of parameters for the Memcached Instance. + + .. 
code-block:: + + from google.cloud import memcache_v1beta2 + + def sample_apply_parameters(): + # Create a client + client = memcache_v1beta2.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1beta2.ApplyParametersRequest( + name="name_value", + ) + + # Make the request + operation = client.apply_parameters(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.memcache_v1beta2.types.ApplyParametersRequest, dict]): The request object. Request for @@ -906,6 +1077,30 @@ async def apply_software_update( r"""Updates software on the selected nodes of the Instance. + + .. code-block:: + + from google.cloud import memcache_v1beta2 + + def sample_apply_software_update(): + # Create a client + client = memcache_v1beta2.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1beta2.ApplySoftwareUpdateRequest( + instance="instance_value", + ) + + # Make the request + operation = client.apply_software_update(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.memcache_v1beta2.types.ApplySoftwareUpdateRequest, dict]): The request object. Request for diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index fa4113d..012aadb 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -432,6 +432,27 @@ def list_instances( ) -> pagers.ListInstancesPager: r"""Lists Instances in a given location. + + .. 
code-block:: + + from google.cloud import memcache_v1beta2 + + def sample_list_instances(): + # Create a client + client = memcache_v1beta2.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1beta2.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.memcache_v1beta2.types.ListInstancesRequest, dict]): The request object. Request for @@ -514,6 +535,26 @@ def get_instance( ) -> cloud_memcache.Instance: r"""Gets details of a single Instance. + + .. code-block:: + + from google.cloud import memcache_v1beta2 + + def sample_get_instance(): + # Create a client + client = memcache_v1beta2.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1beta2.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.memcache_v1beta2.types.GetInstanceRequest, dict]): The request object. Request for @@ -587,6 +628,38 @@ def create_instance( ) -> operation.Operation: r"""Creates a new Instance in a given location. + + .. 
code-block:: + + from google.cloud import memcache_v1beta2 + + def sample_create_instance(): + # Create a client + client = memcache_v1beta2.CloudMemcacheClient() + + # Initialize request argument(s) + resource = memcache_v1beta2.Instance() + resource.name = "name_value" + resource.node_count = 1070 + resource.node_config.cpu_count = 976 + resource.node_config.memory_size_mb = 1505 + + request = memcache_v1beta2.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + resource=resource, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.memcache_v1beta2.types.CreateInstanceRequest, dict]): The request object. Request for @@ -699,6 +772,37 @@ def update_instance( r"""Updates an existing Instance in a given project and location. + + + .. code-block:: + + from google.cloud import memcache_v1beta2 + + def sample_update_instance(): + # Create a client + client = memcache_v1beta2.CloudMemcacheClient() + + # Initialize request argument(s) + resource = memcache_v1beta2.Instance() + resource.name = "name_value" + resource.node_count = 1070 + resource.node_config.cpu_count = 976 + resource.node_config.memory_size_mb = 1505 + + request = memcache_v1beta2.UpdateInstanceRequest( + resource=resource, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.memcache_v1beta2.types.UpdateInstanceRequest, dict]): The request object. Request for @@ -798,6 +902,31 @@ def update_parameters( followed by ``ApplyParameters`` to apply the parameters to nodes of the Memcached instance. + + + .. 
code-block:: + + from google.cloud import memcache_v1beta2 + + def sample_update_parameters(): + # Create a client + client = memcache_v1beta2.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1beta2.UpdateParametersRequest( + name="name_value", + ) + + # Make the request + operation = client.update_parameters(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.memcache_v1beta2.types.UpdateParametersRequest, dict]): The request object. Request for @@ -897,6 +1026,30 @@ def delete_instance( ) -> operation.Operation: r"""Deletes a single Instance. + + .. code-block:: + + from google.cloud import memcache_v1beta2 + + def sample_delete_instance(): + # Create a client + client = memcache_v1beta2.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1beta2.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.memcache_v1beta2.types.DeleteInstanceRequest, dict]): The request object. Request for @@ -995,6 +1148,31 @@ def apply_parameters( to update them to the current set of parameters for the Memcached Instance. + + + .. 
code-block:: + + from google.cloud import memcache_v1beta2 + + def sample_apply_parameters(): + # Create a client + client = memcache_v1beta2.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1beta2.ApplyParametersRequest( + name="name_value", + ) + + # Make the request + operation = client.apply_parameters(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.memcache_v1beta2.types.ApplyParametersRequest, dict]): The request object. Request for @@ -1101,6 +1279,31 @@ def apply_software_update( r"""Updates software on the selected nodes of the Instance. + + + .. code-block:: + + from google.cloud import memcache_v1beta2 + + def sample_apply_software_update(): + # Create a client + client = memcache_v1beta2.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1beta2.ApplySoftwareUpdateRequest( + instance="instance_value", + ) + + # Make the request + operation = client.apply_software_update(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.memcache_v1beta2.types.ApplySoftwareUpdateRequest, dict]): The request object. Request for diff --git a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_apply_parameters_async.py b/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_apply_parameters_async.py new file mode 100644 index 0000000..3d262e1 --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_apply_parameters_async.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ApplyParameters +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1_CloudMemcache_ApplyParameters_async] +from google.cloud import memcache_v1 + + +async def sample_apply_parameters(): + # Create a client + client = memcache_v1.CloudMemcacheAsyncClient() + + # Initialize request argument(s) + request = memcache_v1.ApplyParametersRequest( + name="name_value", + ) + + # Make the request + operation = client.apply_parameters(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END memcache_generated_memcache_v1_CloudMemcache_ApplyParameters_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_apply_parameters_sync.py b/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_apply_parameters_sync.py new file mode 100644 index 0000000..3da8516 --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_apply_parameters_sync.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ApplyParameters +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1_CloudMemcache_ApplyParameters_sync] +from google.cloud import memcache_v1 + + +def sample_apply_parameters(): + # Create a client + client = memcache_v1.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1.ApplyParametersRequest( + name="name_value", + ) + + # Make the request + operation = client.apply_parameters(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END memcache_generated_memcache_v1_CloudMemcache_ApplyParameters_sync] diff --git a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_create_instance_async.py b/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_create_instance_async.py new file mode 100644 index 0000000..b37f0bf --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_create_instance_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1_CloudMemcache_CreateInstance_async] +from google.cloud import memcache_v1 + + +async def sample_create_instance(): + # Create a client + client = memcache_v1.CloudMemcacheAsyncClient() + + # Initialize request argument(s) + instance = memcache_v1.Instance() + instance.name = "name_value" + instance.node_count = 1070 + instance.node_config.cpu_count = 976 + instance.node_config.memory_size_mb = 1505 + + request = memcache_v1.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END memcache_generated_memcache_v1_CloudMemcache_CreateInstance_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_create_instance_sync.py b/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_create_instance_sync.py new file mode 100644 index 0000000..ebcdb7a --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_create_instance_sync.py @@ 
-0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1_CloudMemcache_CreateInstance_sync] +from google.cloud import memcache_v1 + + +def sample_create_instance(): + # Create a client + client = memcache_v1.CloudMemcacheClient() + + # Initialize request argument(s) + instance = memcache_v1.Instance() + instance.name = "name_value" + instance.node_count = 1070 + instance.node_config.cpu_count = 976 + instance.node_config.memory_size_mb = 1505 + + request = memcache_v1.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END memcache_generated_memcache_v1_CloudMemcache_CreateInstance_sync] diff --git a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_delete_instance_async.py 
b/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_delete_instance_async.py new file mode 100644 index 0000000..d7223c9 --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_delete_instance_async.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1_CloudMemcache_DeleteInstance_async] +from google.cloud import memcache_v1 + + +async def sample_delete_instance(): + # Create a client + client = memcache_v1.CloudMemcacheAsyncClient() + + # Initialize request argument(s) + request = memcache_v1.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END memcache_generated_memcache_v1_CloudMemcache_DeleteInstance_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_delete_instance_sync.py b/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_delete_instance_sync.py new file mode 100644 index 0000000..1cd1f37 --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_delete_instance_sync.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1_CloudMemcache_DeleteInstance_sync] +from google.cloud import memcache_v1 + + +def sample_delete_instance(): + # Create a client + client = memcache_v1.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END memcache_generated_memcache_v1_CloudMemcache_DeleteInstance_sync] diff --git a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_get_instance_async.py b/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_get_instance_async.py new file mode 100644 index 0000000..f9bf355 --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_get_instance_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1_CloudMemcache_GetInstance_async] +from google.cloud import memcache_v1 + + +async def sample_get_instance(): + # Create a client + client = memcache_v1.CloudMemcacheAsyncClient() + + # Initialize request argument(s) + request = memcache_v1.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_instance(request=request) + + # Handle the response + print(response) + +# [END memcache_generated_memcache_v1_CloudMemcache_GetInstance_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_get_instance_sync.py b/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_get_instance_sync.py new file mode 100644 index 0000000..1e403ce --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_get_instance_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1_CloudMemcache_GetInstance_sync] +from google.cloud import memcache_v1 + + +def sample_get_instance(): + # Create a client + client = memcache_v1.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance(request=request) + + # Handle the response + print(response) + +# [END memcache_generated_memcache_v1_CloudMemcache_GetInstance_sync] diff --git a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_list_instances_async.py b/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_list_instances_async.py new file mode 100644 index 0000000..e2783a9 --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_list_instances_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1_CloudMemcache_ListInstances_async] +from google.cloud import memcache_v1 + + +async def sample_list_instances(): + # Create a client + client = memcache_v1.CloudMemcacheAsyncClient() + + # Initialize request argument(s) + request = memcache_v1.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END memcache_generated_memcache_v1_CloudMemcache_ListInstances_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_list_instances_sync.py b/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_list_instances_sync.py new file mode 100644 index 0000000..dee17e7 --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_list_instances_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1_CloudMemcache_ListInstances_sync] +from google.cloud import memcache_v1 + + +def sample_list_instances(): + # Create a client + client = memcache_v1.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END memcache_generated_memcache_v1_CloudMemcache_ListInstances_sync] diff --git a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_update_instance_async.py b/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_update_instance_async.py new file mode 100644 index 0000000..6f0221e --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_update_instance_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1_CloudMemcache_UpdateInstance_async] +from google.cloud import memcache_v1 + + +async def sample_update_instance(): + # Create a client + client = memcache_v1.CloudMemcacheAsyncClient() + + # Initialize request argument(s) + instance = memcache_v1.Instance() + instance.name = "name_value" + instance.node_count = 1070 + instance.node_config.cpu_count = 976 + instance.node_config.memory_size_mb = 1505 + + request = memcache_v1.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END memcache_generated_memcache_v1_CloudMemcache_UpdateInstance_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_update_instance_sync.py b/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_update_instance_sync.py new file mode 100644 index 0000000..400eb92 --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_update_instance_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for UpdateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1_CloudMemcache_UpdateInstance_sync] +from google.cloud import memcache_v1 + + +def sample_update_instance(): + # Create a client + client = memcache_v1.CloudMemcacheClient() + + # Initialize request argument(s) + instance = memcache_v1.Instance() + instance.name = "name_value" + instance.node_count = 1070 + instance.node_config.cpu_count = 976 + instance.node_config.memory_size_mb = 1505 + + request = memcache_v1.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END memcache_generated_memcache_v1_CloudMemcache_UpdateInstance_sync] diff --git a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_update_parameters_async.py b/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_update_parameters_async.py new file mode 100644 index 0000000..b9a51ba --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_update_parameters_async.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateParameters +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1_CloudMemcache_UpdateParameters_async] +from google.cloud import memcache_v1 + + +async def sample_update_parameters(): + # Create a client + client = memcache_v1.CloudMemcacheAsyncClient() + + # Initialize request argument(s) + request = memcache_v1.UpdateParametersRequest( + name="name_value", + ) + + # Make the request + operation = client.update_parameters(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END memcache_generated_memcache_v1_CloudMemcache_UpdateParameters_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_update_parameters_sync.py b/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_update_parameters_sync.py new file mode 100644 index 0000000..ef9139e --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_update_parameters_sync.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateParameters +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1_CloudMemcache_UpdateParameters_sync] +from google.cloud import memcache_v1 + + +def sample_update_parameters(): + # Create a client + client = memcache_v1.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1.UpdateParametersRequest( + name="name_value", + ) + + # Make the request + operation = client.update_parameters(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END memcache_generated_memcache_v1_CloudMemcache_UpdateParameters_sync] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_apply_parameters_async.py b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_apply_parameters_async.py new file mode 100644 index 0000000..34cd90e --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_apply_parameters_async.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ApplyParameters +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1beta2_CloudMemcache_ApplyParameters_async] +from google.cloud import memcache_v1beta2 + + +async def sample_apply_parameters(): + # Create a client + client = memcache_v1beta2.CloudMemcacheAsyncClient() + + # Initialize request argument(s) + request = memcache_v1beta2.ApplyParametersRequest( + name="name_value", + ) + + # Make the request + operation = client.apply_parameters(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END memcache_generated_memcache_v1beta2_CloudMemcache_ApplyParameters_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_apply_parameters_sync.py b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_apply_parameters_sync.py new file mode 100644 index 0000000..e380a23 --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_apply_parameters_sync.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ApplyParameters +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1beta2_CloudMemcache_ApplyParameters_sync] +from google.cloud import memcache_v1beta2 + + +def sample_apply_parameters(): + # Create a client + client = memcache_v1beta2.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1beta2.ApplyParametersRequest( + name="name_value", + ) + + # Make the request + operation = client.apply_parameters(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END memcache_generated_memcache_v1beta2_CloudMemcache_ApplyParameters_sync] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_apply_software_update_async.py b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_apply_software_update_async.py new file mode 100644 index 0000000..c11bfcc --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_apply_software_update_async.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with 
the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ApplySoftwareUpdate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1beta2_CloudMemcache_ApplySoftwareUpdate_async] +from google.cloud import memcache_v1beta2 + + +async def sample_apply_software_update(): + # Create a client + client = memcache_v1beta2.CloudMemcacheAsyncClient() + + # Initialize request argument(s) + request = memcache_v1beta2.ApplySoftwareUpdateRequest( + instance="instance_value", + ) + + # Make the request + operation = client.apply_software_update(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END memcache_generated_memcache_v1beta2_CloudMemcache_ApplySoftwareUpdate_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_apply_software_update_sync.py b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_apply_software_update_sync.py new file mode 100644 index 0000000..4b0d7a6 --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_apply_software_update_sync.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ApplySoftwareUpdate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1beta2_CloudMemcache_ApplySoftwareUpdate_sync] +from google.cloud import memcache_v1beta2 + + +def sample_apply_software_update(): + # Create a client + client = memcache_v1beta2.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1beta2.ApplySoftwareUpdateRequest( + instance="instance_value", + ) + + # Make the request + operation = client.apply_software_update(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END memcache_generated_memcache_v1beta2_CloudMemcache_ApplySoftwareUpdate_sync] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_create_instance_async.py b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_create_instance_async.py new file mode 100644 index 0000000..20c42fe --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_create_instance_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the 
Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1beta2_CloudMemcache_CreateInstance_async] +from google.cloud import memcache_v1beta2 + + +async def sample_create_instance(): + # Create a client + client = memcache_v1beta2.CloudMemcacheAsyncClient() + + # Initialize request argument(s) + resource = memcache_v1beta2.Instance() + resource.name = "name_value" + resource.node_count = 1070 + resource.node_config.cpu_count = 976 + resource.node_config.memory_size_mb = 1505 + + request = memcache_v1beta2.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + resource=resource, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END memcache_generated_memcache_v1beta2_CloudMemcache_CreateInstance_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_create_instance_sync.py b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_create_instance_sync.py new file 
mode 100644 index 0000000..8eb697c --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_create_instance_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1beta2_CloudMemcache_CreateInstance_sync] +from google.cloud import memcache_v1beta2 + + +def sample_create_instance(): + # Create a client + client = memcache_v1beta2.CloudMemcacheClient() + + # Initialize request argument(s) + resource = memcache_v1beta2.Instance() + resource.name = "name_value" + resource.node_count = 1070 + resource.node_config.cpu_count = 976 + resource.node_config.memory_size_mb = 1505 + + request = memcache_v1beta2.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + resource=resource, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END 
memcache_generated_memcache_v1beta2_CloudMemcache_CreateInstance_sync] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_delete_instance_async.py b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_delete_instance_async.py new file mode 100644 index 0000000..d8223f9 --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_delete_instance_async.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1beta2_CloudMemcache_DeleteInstance_async] +from google.cloud import memcache_v1beta2 + + +async def sample_delete_instance(): + # Create a client + client = memcache_v1beta2.CloudMemcacheAsyncClient() + + # Initialize request argument(s) + request = memcache_v1beta2.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END memcache_generated_memcache_v1beta2_CloudMemcache_DeleteInstance_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_delete_instance_sync.py b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_delete_instance_sync.py new file mode 100644 index 0000000..59a9c10 --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_delete_instance_sync.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1beta2_CloudMemcache_DeleteInstance_sync] +from google.cloud import memcache_v1beta2 + + +def sample_delete_instance(): + # Create a client + client = memcache_v1beta2.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1beta2.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END memcache_generated_memcache_v1beta2_CloudMemcache_DeleteInstance_sync] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_get_instance_async.py b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_get_instance_async.py new file mode 100644 index 0000000..b72a0fa --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_get_instance_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1beta2_CloudMemcache_GetInstance_async] +from google.cloud import memcache_v1beta2 + + +async def sample_get_instance(): + # Create a client + client = memcache_v1beta2.CloudMemcacheAsyncClient() + + # Initialize request argument(s) + request = memcache_v1beta2.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_instance(request=request) + + # Handle the response + print(response) + +# [END memcache_generated_memcache_v1beta2_CloudMemcache_GetInstance_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_get_instance_sync.py b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_get_instance_sync.py new file mode 100644 index 0000000..e35c369 --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_get_instance_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1beta2_CloudMemcache_GetInstance_sync] +from google.cloud import memcache_v1beta2 + + +def sample_get_instance(): + # Create a client + client = memcache_v1beta2.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1beta2.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance(request=request) + + # Handle the response + print(response) + +# [END memcache_generated_memcache_v1beta2_CloudMemcache_GetInstance_sync] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_list_instances_async.py b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_list_instances_async.py new file mode 100644 index 0000000..2032768 --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_list_instances_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1beta2_CloudMemcache_ListInstances_async] +from google.cloud import memcache_v1beta2 + + +async def sample_list_instances(): + # Create a client + client = memcache_v1beta2.CloudMemcacheAsyncClient() + + # Initialize request argument(s) + request = memcache_v1beta2.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END memcache_generated_memcache_v1beta2_CloudMemcache_ListInstances_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_list_instances_sync.py b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_list_instances_sync.py new file mode 100644 index 0000000..4bdc296 --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_list_instances_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1beta2_CloudMemcache_ListInstances_sync] +from google.cloud import memcache_v1beta2 + + +def sample_list_instances(): + # Create a client + client = memcache_v1beta2.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1beta2.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END memcache_generated_memcache_v1beta2_CloudMemcache_ListInstances_sync] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_update_instance_async.py b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_update_instance_async.py new file mode 100644 index 0000000..09d0b9a --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_update_instance_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1beta2_CloudMemcache_UpdateInstance_async] +from google.cloud import memcache_v1beta2 + + +async def sample_update_instance(): + # Create a client + client = memcache_v1beta2.CloudMemcacheAsyncClient() + + # Initialize request argument(s) + resource = memcache_v1beta2.Instance() + resource.name = "name_value" + resource.node_count = 1070 + resource.node_config.cpu_count = 976 + resource.node_config.memory_size_mb = 1505 + + request = memcache_v1beta2.UpdateInstanceRequest( + resource=resource, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END memcache_generated_memcache_v1beta2_CloudMemcache_UpdateInstance_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_update_instance_sync.py b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_update_instance_sync.py new file mode 100644 index 0000000..f1d3505 --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_update_instance_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for UpdateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1beta2_CloudMemcache_UpdateInstance_sync] +from google.cloud import memcache_v1beta2 + + +def sample_update_instance(): + # Create a client + client = memcache_v1beta2.CloudMemcacheClient() + + # Initialize request argument(s) + resource = memcache_v1beta2.Instance() + resource.name = "name_value" + resource.node_count = 1070 + resource.node_config.cpu_count = 976 + resource.node_config.memory_size_mb = 1505 + + request = memcache_v1beta2.UpdateInstanceRequest( + resource=resource, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END memcache_generated_memcache_v1beta2_CloudMemcache_UpdateInstance_sync] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_update_parameters_async.py b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_update_parameters_async.py new file mode 100644 index 0000000..df411b3 --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_update_parameters_async.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateParameters +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1beta2_CloudMemcache_UpdateParameters_async] +from google.cloud import memcache_v1beta2 + + +async def sample_update_parameters(): + # Create a client + client = memcache_v1beta2.CloudMemcacheAsyncClient() + + # Initialize request argument(s) + request = memcache_v1beta2.UpdateParametersRequest( + name="name_value", + ) + + # Make the request + operation = client.update_parameters(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END memcache_generated_memcache_v1beta2_CloudMemcache_UpdateParameters_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_update_parameters_sync.py b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_update_parameters_sync.py new file mode 100644 index 0000000..faebb21 --- /dev/null +++ b/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_update_parameters_sync.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateParameters +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_generated_memcache_v1beta2_CloudMemcache_UpdateParameters_sync] +from google.cloud import memcache_v1beta2 + + +def sample_update_parameters(): + # Create a client + client = memcache_v1beta2.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1beta2.UpdateParametersRequest( + name="name_value", + ) + + # Make the request + operation = client.update_parameters(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END memcache_generated_memcache_v1beta2_CloudMemcache_UpdateParameters_sync] diff --git a/samples/generated_samples/snippet_metadata_memcache_v1.json b/samples/generated_samples/snippet_metadata_memcache_v1.json new file mode 100644 index 0000000..878a7c8 --- /dev/null +++ b/samples/generated_samples/snippet_metadata_memcache_v1.json @@ -0,0 +1,627 @@ +{ + "snippets": [ + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "CloudMemcache" + }, + "shortName": "ApplyParameters" + } + }, + "file": "memcache_generated_memcache_v1_cloud_memcache_apply_parameters_async.py", + "regionTag": 
"memcache_generated_memcache_v1_CloudMemcache_ApplyParameters_async", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "CloudMemcache" + }, + "shortName": "ApplyParameters" + } + }, + "file": "memcache_generated_memcache_v1_cloud_memcache_apply_parameters_sync.py", + "regionTag": "memcache_generated_memcache_v1_CloudMemcache_ApplyParameters_sync", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "CloudMemcache" + }, + "shortName": "CreateInstance" + } + }, + "file": "memcache_generated_memcache_v1_cloud_memcache_create_instance_async.py", + "regionTag": "memcache_generated_memcache_v1_CloudMemcache_CreateInstance_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "CloudMemcache" + 
}, + "shortName": "CreateInstance" + } + }, + "file": "memcache_generated_memcache_v1_cloud_memcache_create_instance_sync.py", + "regionTag": "memcache_generated_memcache_v1_CloudMemcache_CreateInstance_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "CloudMemcache" + }, + "shortName": "DeleteInstance" + } + }, + "file": "memcache_generated_memcache_v1_cloud_memcache_delete_instance_async.py", + "regionTag": "memcache_generated_memcache_v1_CloudMemcache_DeleteInstance_async", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "CloudMemcache" + }, + "shortName": "DeleteInstance" + } + }, + "file": "memcache_generated_memcache_v1_cloud_memcache_delete_instance_sync.py", + "regionTag": "memcache_generated_memcache_v1_CloudMemcache_DeleteInstance_sync", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + 
"start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "CloudMemcache" + }, + "shortName": "GetInstance" + } + }, + "file": "memcache_generated_memcache_v1_cloud_memcache_get_instance_async.py", + "regionTag": "memcache_generated_memcache_v1_CloudMemcache_GetInstance_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "CloudMemcache" + }, + "shortName": "GetInstance" + } + }, + "file": "memcache_generated_memcache_v1_cloud_memcache_get_instance_sync.py", + "regionTag": "memcache_generated_memcache_v1_CloudMemcache_GetInstance_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "CloudMemcache" + }, + "shortName": "ListInstances" + } + }, + "file": "memcache_generated_memcache_v1_cloud_memcache_list_instances_async.py", + "regionTag": "memcache_generated_memcache_v1_CloudMemcache_ListInstances_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + 
"start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "CloudMemcache" + }, + "shortName": "ListInstances" + } + }, + "file": "memcache_generated_memcache_v1_cloud_memcache_list_instances_sync.py", + "regionTag": "memcache_generated_memcache_v1_CloudMemcache_ListInstances_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "CloudMemcache" + }, + "shortName": "UpdateInstance" + } + }, + "file": "memcache_generated_memcache_v1_cloud_memcache_update_instance_async.py", + "regionTag": "memcache_generated_memcache_v1_CloudMemcache_UpdateInstance_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "CloudMemcache" + }, + "shortName": "UpdateInstance" + } + }, + "file": "memcache_generated_memcache_v1_cloud_memcache_update_instance_sync.py", + "regionTag": "memcache_generated_memcache_v1_CloudMemcache_UpdateInstance_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + 
"end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "CloudMemcache" + }, + "shortName": "UpdateParameters" + } + }, + "file": "memcache_generated_memcache_v1_cloud_memcache_update_parameters_async.py", + "regionTag": "memcache_generated_memcache_v1_CloudMemcache_UpdateParameters_async", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "CloudMemcache" + }, + "shortName": "UpdateParameters" + } + }, + "file": "memcache_generated_memcache_v1_cloud_memcache_update_parameters_sync.py", + "regionTag": "memcache_generated_memcache_v1_CloudMemcache_UpdateParameters_sync", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + } + ] +} diff --git a/samples/generated_samples/snippet_metadata_memcache_v1beta2.json b/samples/generated_samples/snippet_metadata_memcache_v1beta2.json new file mode 100644 index 0000000..b0cdb27 --- /dev/null +++ 
b/samples/generated_samples/snippet_metadata_memcache_v1beta2.json @@ -0,0 +1,716 @@ +{ + "snippets": [ + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "CloudMemcache" + }, + "shortName": "ApplyParameters" + } + }, + "file": "memcache_generated_memcache_v1beta2_cloud_memcache_apply_parameters_async.py", + "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_ApplyParameters_async", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "CloudMemcache" + }, + "shortName": "ApplyParameters" + } + }, + "file": "memcache_generated_memcache_v1beta2_cloud_memcache_apply_parameters_sync.py", + "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_ApplyParameters_sync", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "CloudMemcache" + }, + "shortName": "ApplySoftwareUpdate" + } + }, + "file": "memcache_generated_memcache_v1beta2_cloud_memcache_apply_software_update_async.py", + "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_ApplySoftwareUpdate_async", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 
27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "CloudMemcache" + }, + "shortName": "ApplySoftwareUpdate" + } + }, + "file": "memcache_generated_memcache_v1beta2_cloud_memcache_apply_software_update_sync.py", + "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_ApplySoftwareUpdate_sync", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "CloudMemcache" + }, + "shortName": "CreateInstance" + } + }, + "file": "memcache_generated_memcache_v1beta2_cloud_memcache_create_instance_async.py", + "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_CreateInstance_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "CloudMemcache" + }, + "shortName": "CreateInstance" + } + }, + "file": "memcache_generated_memcache_v1beta2_cloud_memcache_create_instance_sync.py", + 
"regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_CreateInstance_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "CloudMemcache" + }, + "shortName": "DeleteInstance" + } + }, + "file": "memcache_generated_memcache_v1beta2_cloud_memcache_delete_instance_async.py", + "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_DeleteInstance_async", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "CloudMemcache" + }, + "shortName": "DeleteInstance" + } + }, + "file": "memcache_generated_memcache_v1beta2_cloud_memcache_delete_instance_sync.py", + "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_DeleteInstance_sync", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": 
{ + "service": { + "shortName": "CloudMemcache" + }, + "shortName": "GetInstance" + } + }, + "file": "memcache_generated_memcache_v1beta2_cloud_memcache_get_instance_async.py", + "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_GetInstance_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "CloudMemcache" + }, + "shortName": "GetInstance" + } + }, + "file": "memcache_generated_memcache_v1beta2_cloud_memcache_get_instance_sync.py", + "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_GetInstance_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "CloudMemcache" + }, + "shortName": "ListInstances" + } + }, + "file": "memcache_generated_memcache_v1beta2_cloud_memcache_list_instances_async.py", + "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_ListInstances_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 
39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "CloudMemcache" + }, + "shortName": "ListInstances" + } + }, + "file": "memcache_generated_memcache_v1beta2_cloud_memcache_list_instances_sync.py", + "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_ListInstances_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "CloudMemcache" + }, + "shortName": "UpdateInstance" + } + }, + "file": "memcache_generated_memcache_v1beta2_cloud_memcache_update_instance_async.py", + "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_UpdateInstance_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "CloudMemcache" + }, + "shortName": "UpdateInstance" + } + }, + "file": "memcache_generated_memcache_v1beta2_cloud_memcache_update_instance_sync.py", + "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_UpdateInstance_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + 
"end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "CloudMemcache" + }, + "shortName": "UpdateParameters" + } + }, + "file": "memcache_generated_memcache_v1beta2_cloud_memcache_update_parameters_async.py", + "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_UpdateParameters_async", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "CloudMemcache" + }, + "shortName": "UpdateParameters" + } + }, + "file": "memcache_generated_memcache_v1beta2_cloud_memcache_update_parameters_sync.py", + "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_UpdateParameters_sync", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + } + ] +} From 5aea133da0926403e736b69a94456a966e7a1dce Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 25 Feb 2022 10:40:31 -0700 Subject: [PATCH 082/159] chore: use gapic-generator-python 
0.63.4 (#141) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: use gapic-generator-python 0.63.4 chore: fix snippet region tag format chore: fix docstring code block formatting PiperOrigin-RevId: 430730865 Source-Link: https://github.com/googleapis/googleapis/commit/ea5800229f73f94fd7204915a86ed09dcddf429a Source-Link: https://github.com/googleapis/googleapis-gen/commit/ca893ff8af25fc7fe001de1405a517d80446ecca Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiY2E4OTNmZjhhZjI1ZmM3ZmUwMDFkZTE0MDVhNTE3ZDgwNDQ2ZWNjYSJ9 * 🩉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🩉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: delete duplicates Co-authored-by: Owl Bot Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> --- .../services/cloud_memcache/async_client.py | 14 ++-- .../services/cloud_memcache/client.py | 21 ++---- .../services/cloud_memcache/async_client.py | 16 ++--- .../services/cloud_memcache/client.py | 24 +++---- ..._cloud_memcache_apply_parameters_async.py} | 4 +- ...d_cloud_memcache_apply_parameters_sync.py} | 4 +- ...d_cloud_memcache_create_instance_async.py} | 4 +- ...ed_cloud_memcache_create_instance_sync.py} | 4 +- ...d_cloud_memcache_delete_instance_async.py} | 4 +- ...ed_cloud_memcache_delete_instance_sync.py} | 4 +- ...ated_cloud_memcache_get_instance_async.py} | 4 +- ...rated_cloud_memcache_get_instance_sync.py} | 4 +- ...ed_cloud_memcache_list_instances_async.py} | 4 +- ...ted_cloud_memcache_list_instances_sync.py} | 4 +- ...d_cloud_memcache_update_instance_async.py} | 4 +- ...ed_cloud_memcache_update_instance_sync.py} | 4 +- ...cloud_memcache_update_parameters_async.py} | 4 +- ..._cloud_memcache_update_parameters_sync.py} | 4 +- ..._cloud_memcache_apply_parameters_async.py} | 4 +- 
...d_cloud_memcache_apply_parameters_sync.py} | 4 +- ...d_memcache_apply_software_update_async.py} | 4 +- ...ud_memcache_apply_software_update_sync.py} | 4 +- ...d_cloud_memcache_create_instance_async.py} | 4 +- ...ed_cloud_memcache_create_instance_sync.py} | 4 +- ...d_cloud_memcache_delete_instance_async.py} | 4 +- ...ed_cloud_memcache_delete_instance_sync.py} | 4 +- ...ated_cloud_memcache_get_instance_async.py} | 4 +- ...rated_cloud_memcache_get_instance_sync.py} | 4 +- ...ed_cloud_memcache_list_instances_async.py} | 4 +- ...ted_cloud_memcache_list_instances_sync.py} | 4 +- ...d_cloud_memcache_update_instance_async.py} | 4 +- ...ed_cloud_memcache_update_instance_sync.py} | 4 +- ...cloud_memcache_update_parameters_async.py} | 4 +- ..._cloud_memcache_update_parameters_sync.py} | 4 +- .../snippet_metadata_memcache_v1.json | 56 ++++++++-------- .../snippet_metadata_memcache_v1beta2.json | 64 +++++++++---------- 36 files changed, 150 insertions(+), 165 deletions(-) rename samples/generated_samples/{memcache_generated_memcache_v1_cloud_memcache_apply_parameters_async.py => memcache_v1_generated_cloud_memcache_apply_parameters_async.py} (90%) rename samples/generated_samples/{memcache_generated_memcache_v1_cloud_memcache_apply_parameters_sync.py => memcache_v1_generated_cloud_memcache_apply_parameters_sync.py} (90%) rename samples/generated_samples/{memcache_generated_memcache_v1_cloud_memcache_create_instance_async.py => memcache_v1_generated_cloud_memcache_create_instance_async.py} (91%) rename samples/generated_samples/{memcache_generated_memcache_v1_cloud_memcache_create_instance_sync.py => memcache_v1_generated_cloud_memcache_create_instance_sync.py} (91%) rename samples/generated_samples/{memcache_generated_memcache_v1_cloud_memcache_delete_instance_async.py => memcache_v1_generated_cloud_memcache_delete_instance_async.py} (90%) rename samples/generated_samples/{memcache_generated_memcache_v1_cloud_memcache_delete_instance_sync.py => 
memcache_v1_generated_cloud_memcache_delete_instance_sync.py} (90%) rename samples/generated_samples/{memcache_generated_memcache_v1_cloud_memcache_get_instance_async.py => memcache_v1_generated_cloud_memcache_get_instance_async.py} (90%) rename samples/generated_samples/{memcache_generated_memcache_v1_cloud_memcache_get_instance_sync.py => memcache_v1_generated_cloud_memcache_get_instance_sync.py} (90%) rename samples/generated_samples/{memcache_generated_memcache_v1_cloud_memcache_list_instances_async.py => memcache_v1_generated_cloud_memcache_list_instances_async.py} (90%) rename samples/generated_samples/{memcache_generated_memcache_v1_cloud_memcache_list_instances_sync.py => memcache_v1_generated_cloud_memcache_list_instances_sync.py} (90%) rename samples/generated_samples/{memcache_generated_memcache_v1_cloud_memcache_update_instance_async.py => memcache_v1_generated_cloud_memcache_update_instance_async.py} (91%) rename samples/generated_samples/{memcache_generated_memcache_v1_cloud_memcache_update_instance_sync.py => memcache_v1_generated_cloud_memcache_update_instance_sync.py} (91%) rename samples/generated_samples/{memcache_generated_memcache_v1_cloud_memcache_update_parameters_async.py => memcache_v1_generated_cloud_memcache_update_parameters_async.py} (90%) rename samples/generated_samples/{memcache_generated_memcache_v1_cloud_memcache_update_parameters_sync.py => memcache_v1_generated_cloud_memcache_update_parameters_sync.py} (90%) rename samples/generated_samples/{memcache_generated_memcache_v1beta2_cloud_memcache_apply_parameters_async.py => memcache_v1beta2_generated_cloud_memcache_apply_parameters_async.py} (89%) rename samples/generated_samples/{memcache_generated_memcache_v1beta2_cloud_memcache_apply_parameters_sync.py => memcache_v1beta2_generated_cloud_memcache_apply_parameters_sync.py} (89%) rename samples/generated_samples/{memcache_generated_memcache_v1beta2_cloud_memcache_apply_software_update_async.py => 
memcache_v1beta2_generated_cloud_memcache_apply_software_update_async.py} (89%) rename samples/generated_samples/{memcache_generated_memcache_v1beta2_cloud_memcache_apply_software_update_sync.py => memcache_v1beta2_generated_cloud_memcache_apply_software_update_sync.py} (89%) rename samples/generated_samples/{memcache_generated_memcache_v1beta2_cloud_memcache_create_instance_async.py => memcache_v1beta2_generated_cloud_memcache_create_instance_async.py} (91%) rename samples/generated_samples/{memcache_generated_memcache_v1beta2_cloud_memcache_create_instance_sync.py => memcache_v1beta2_generated_cloud_memcache_create_instance_sync.py} (91%) rename samples/generated_samples/{memcache_generated_memcache_v1beta2_cloud_memcache_delete_instance_async.py => memcache_v1beta2_generated_cloud_memcache_delete_instance_async.py} (90%) rename samples/generated_samples/{memcache_generated_memcache_v1beta2_cloud_memcache_delete_instance_sync.py => memcache_v1beta2_generated_cloud_memcache_delete_instance_sync.py} (90%) rename samples/generated_samples/{memcache_generated_memcache_v1beta2_cloud_memcache_get_instance_async.py => memcache_v1beta2_generated_cloud_memcache_get_instance_async.py} (89%) rename samples/generated_samples/{memcache_generated_memcache_v1beta2_cloud_memcache_get_instance_sync.py => memcache_v1beta2_generated_cloud_memcache_get_instance_sync.py} (89%) rename samples/generated_samples/{memcache_generated_memcache_v1beta2_cloud_memcache_list_instances_async.py => memcache_v1beta2_generated_cloud_memcache_list_instances_async.py} (89%) rename samples/generated_samples/{memcache_generated_memcache_v1beta2_cloud_memcache_list_instances_sync.py => memcache_v1beta2_generated_cloud_memcache_list_instances_sync.py} (89%) rename samples/generated_samples/{memcache_generated_memcache_v1beta2_cloud_memcache_update_instance_async.py => memcache_v1beta2_generated_cloud_memcache_update_instance_async.py} (91%) rename 
samples/generated_samples/{memcache_generated_memcache_v1beta2_cloud_memcache_update_instance_sync.py => memcache_v1beta2_generated_cloud_memcache_update_instance_sync.py} (91%) rename samples/generated_samples/{memcache_generated_memcache_v1beta2_cloud_memcache_update_parameters_async.py => memcache_v1beta2_generated_cloud_memcache_update_parameters_async.py} (89%) rename samples/generated_samples/{memcache_generated_memcache_v1beta2_cloud_memcache_update_parameters_sync.py => memcache_v1beta2_generated_cloud_memcache_update_parameters_sync.py} (89%) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py index f8699da..9025aa5 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py @@ -237,7 +237,7 @@ async def list_instances( ) -> pagers.ListInstancesAsyncPager: r"""Lists Instances in a given location. - .. code-block:: + .. code-block:: python from google.cloud import memcache_v1 @@ -339,7 +339,7 @@ async def get_instance( ) -> cloud_memcache.Instance: r"""Gets details of a single Instance. - .. code-block:: + .. code-block:: python from google.cloud import memcache_v1 @@ -431,7 +431,7 @@ async def create_instance( ) -> operation_async.AsyncOperation: r"""Creates a new Instance in a given location. - .. code-block:: + .. code-block:: python from google.cloud import memcache_v1 @@ -574,7 +574,7 @@ async def update_instance( location. - .. code-block:: + .. code-block:: python from google.cloud import memcache_v1 @@ -702,7 +702,7 @@ async def update_parameters( apply the parameters to nodes of the Memcached Instance. - .. code-block:: + .. code-block:: python from google.cloud import memcache_v1 @@ -823,7 +823,7 @@ async def delete_instance( ) -> operation_async.AsyncOperation: r"""Deletes a single Instance. - .. code-block:: + .. 
code-block:: python from google.cloud import memcache_v1 @@ -945,7 +945,7 @@ async def apply_parameters( parameters for the Memcached Instance. - .. code-block:: + .. code-block:: python from google.cloud import memcache_v1 diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py index c0129a3..8079b95 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -432,8 +432,7 @@ def list_instances( ) -> pagers.ListInstancesPager: r"""Lists Instances in a given location. - - .. code-block:: + .. code-block:: python from google.cloud import memcache_v1 @@ -535,8 +534,7 @@ def get_instance( ) -> cloud_memcache.Instance: r"""Gets details of a single Instance. - - .. code-block:: + .. code-block:: python from google.cloud import memcache_v1 @@ -628,8 +626,7 @@ def create_instance( ) -> operation.Operation: r"""Creates a new Instance in a given location. - - .. code-block:: + .. code-block:: python from google.cloud import memcache_v1 @@ -772,8 +769,7 @@ def update_instance( location. - - .. code-block:: + .. code-block:: python from google.cloud import memcache_v1 @@ -901,8 +897,7 @@ def update_parameters( apply the parameters to nodes of the Memcached Instance. - - .. code-block:: + .. code-block:: python from google.cloud import memcache_v1 @@ -1023,8 +1018,7 @@ def delete_instance( ) -> operation.Operation: r"""Deletes a single Instance. - - .. code-block:: + .. code-block:: python from google.cloud import memcache_v1 @@ -1146,8 +1140,7 @@ def apply_parameters( parameters for the Memcached Instance. - - .. code-block:: + .. 
code-block:: python from google.cloud import memcache_v1 diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py index 34b8605..6a8401c 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py @@ -237,7 +237,7 @@ async def list_instances( ) -> pagers.ListInstancesAsyncPager: r"""Lists Instances in a given location. - .. code-block:: + .. code-block:: python from google.cloud import memcache_v1beta2 @@ -339,7 +339,7 @@ async def get_instance( ) -> cloud_memcache.Instance: r"""Gets details of a single Instance. - .. code-block:: + .. code-block:: python from google.cloud import memcache_v1beta2 @@ -431,7 +431,7 @@ async def create_instance( ) -> operation_async.AsyncOperation: r"""Creates a new Instance in a given location. - .. code-block:: + .. code-block:: python from google.cloud import memcache_v1beta2 @@ -575,7 +575,7 @@ async def update_instance( location. - .. code-block:: + .. code-block:: python from google.cloud import memcache_v1beta2 @@ -704,7 +704,7 @@ async def update_parameters( of the Memcached instance. - .. code-block:: + .. code-block:: python from google.cloud import memcache_v1beta2 @@ -826,7 +826,7 @@ async def delete_instance( ) -> operation_async.AsyncOperation: r"""Deletes a single Instance. - .. code-block:: + .. code-block:: python from google.cloud import memcache_v1beta2 @@ -948,7 +948,7 @@ async def apply_parameters( Memcached Instance. - .. code-block:: + .. code-block:: python from google.cloud import memcache_v1beta2 @@ -1078,7 +1078,7 @@ async def apply_software_update( Instance. - .. code-block:: + .. 
code-block:: python from google.cloud import memcache_v1beta2 diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index 012aadb..12232c5 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -432,8 +432,7 @@ def list_instances( ) -> pagers.ListInstancesPager: r"""Lists Instances in a given location. - - .. code-block:: + .. code-block:: python from google.cloud import memcache_v1beta2 @@ -535,8 +534,7 @@ def get_instance( ) -> cloud_memcache.Instance: r"""Gets details of a single Instance. - - .. code-block:: + .. code-block:: python from google.cloud import memcache_v1beta2 @@ -628,8 +626,7 @@ def create_instance( ) -> operation.Operation: r"""Creates a new Instance in a given location. - - .. code-block:: + .. code-block:: python from google.cloud import memcache_v1beta2 @@ -773,8 +770,7 @@ def update_instance( location. - - .. code-block:: + .. code-block:: python from google.cloud import memcache_v1beta2 @@ -903,8 +899,7 @@ def update_parameters( of the Memcached instance. - - .. code-block:: + .. code-block:: python from google.cloud import memcache_v1beta2 @@ -1026,8 +1021,7 @@ def delete_instance( ) -> operation.Operation: r"""Deletes a single Instance. - - .. code-block:: + .. code-block:: python from google.cloud import memcache_v1beta2 @@ -1149,8 +1143,7 @@ def apply_parameters( Memcached Instance. - - .. code-block:: + .. code-block:: python from google.cloud import memcache_v1beta2 @@ -1280,8 +1273,7 @@ def apply_software_update( Instance. - - .. code-block:: + .. 
code-block:: python from google.cloud import memcache_v1beta2 diff --git a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_apply_parameters_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_async.py similarity index 90% rename from samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_apply_parameters_async.py rename to samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_async.py index 3d262e1..34033e5 100644 --- a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_apply_parameters_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-memcache -# [START memcache_generated_memcache_v1_CloudMemcache_ApplyParameters_async] +# [START memcache_v1_generated_CloudMemcache_ApplyParameters_async] from google.cloud import memcache_v1 @@ -46,4 +46,4 @@ async def sample_apply_parameters(): # Handle the response print(response) -# [END memcache_generated_memcache_v1_CloudMemcache_ApplyParameters_async] +# [END memcache_v1_generated_CloudMemcache_ApplyParameters_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_apply_parameters_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_sync.py similarity index 90% rename from samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_apply_parameters_sync.py rename to samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_sync.py index 3da8516..994b73c 100644 --- a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_apply_parameters_sync.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-memcache -# [START 
memcache_generated_memcache_v1_CloudMemcache_ApplyParameters_sync] +# [START memcache_v1_generated_CloudMemcache_ApplyParameters_sync] from google.cloud import memcache_v1 @@ -46,4 +46,4 @@ def sample_apply_parameters(): # Handle the response print(response) -# [END memcache_generated_memcache_v1_CloudMemcache_ApplyParameters_sync] +# [END memcache_v1_generated_CloudMemcache_ApplyParameters_sync] diff --git a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_create_instance_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_async.py similarity index 91% rename from samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_create_instance_async.py rename to samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_async.py index b37f0bf..3830084 100644 --- a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_create_instance_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-memcache -# [START memcache_generated_memcache_v1_CloudMemcache_CreateInstance_async] +# [START memcache_v1_generated_CloudMemcache_CreateInstance_async] from google.cloud import memcache_v1 @@ -54,4 +54,4 @@ async def sample_create_instance(): # Handle the response print(response) -# [END memcache_generated_memcache_v1_CloudMemcache_CreateInstance_async] +# [END memcache_v1_generated_CloudMemcache_CreateInstance_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_create_instance_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_sync.py similarity index 91% rename from samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_create_instance_sync.py rename to samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_sync.py index ebcdb7a..ab28595 100644 --- 
a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_create_instance_sync.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-memcache -# [START memcache_generated_memcache_v1_CloudMemcache_CreateInstance_sync] +# [START memcache_v1_generated_CloudMemcache_CreateInstance_sync] from google.cloud import memcache_v1 @@ -54,4 +54,4 @@ def sample_create_instance(): # Handle the response print(response) -# [END memcache_generated_memcache_v1_CloudMemcache_CreateInstance_sync] +# [END memcache_v1_generated_CloudMemcache_CreateInstance_sync] diff --git a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_delete_instance_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_async.py similarity index 90% rename from samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_delete_instance_async.py rename to samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_async.py index d7223c9..4a95e1d 100644 --- a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_delete_instance_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-memcache -# [START memcache_generated_memcache_v1_CloudMemcache_DeleteInstance_async] +# [START memcache_v1_generated_CloudMemcache_DeleteInstance_async] from google.cloud import memcache_v1 @@ -46,4 +46,4 @@ async def sample_delete_instance(): # Handle the response print(response) -# [END memcache_generated_memcache_v1_CloudMemcache_DeleteInstance_async] +# [END memcache_v1_generated_CloudMemcache_DeleteInstance_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_delete_instance_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_sync.py 
similarity index 90% rename from samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_delete_instance_sync.py rename to samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_sync.py index 1cd1f37..e82377e 100644 --- a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_delete_instance_sync.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-memcache -# [START memcache_generated_memcache_v1_CloudMemcache_DeleteInstance_sync] +# [START memcache_v1_generated_CloudMemcache_DeleteInstance_sync] from google.cloud import memcache_v1 @@ -46,4 +46,4 @@ def sample_delete_instance(): # Handle the response print(response) -# [END memcache_generated_memcache_v1_CloudMemcache_DeleteInstance_sync] +# [END memcache_v1_generated_CloudMemcache_DeleteInstance_sync] diff --git a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_get_instance_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_async.py similarity index 90% rename from samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_get_instance_async.py rename to samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_async.py index f9bf355..2c61825 100644 --- a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_get_instance_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-memcache -# [START memcache_generated_memcache_v1_CloudMemcache_GetInstance_async] +# [START memcache_v1_generated_CloudMemcache_GetInstance_async] from google.cloud import memcache_v1 @@ -42,4 +42,4 @@ async def sample_get_instance(): # Handle the response print(response) -# [END memcache_generated_memcache_v1_CloudMemcache_GetInstance_async] +# [END 
memcache_v1_generated_CloudMemcache_GetInstance_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_get_instance_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_sync.py similarity index 90% rename from samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_get_instance_sync.py rename to samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_sync.py index 1e403ce..fd70c0f 100644 --- a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_get_instance_sync.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-memcache -# [START memcache_generated_memcache_v1_CloudMemcache_GetInstance_sync] +# [START memcache_v1_generated_CloudMemcache_GetInstance_sync] from google.cloud import memcache_v1 @@ -42,4 +42,4 @@ def sample_get_instance(): # Handle the response print(response) -# [END memcache_generated_memcache_v1_CloudMemcache_GetInstance_sync] +# [END memcache_v1_generated_CloudMemcache_GetInstance_sync] diff --git a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_list_instances_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_async.py similarity index 90% rename from samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_list_instances_async.py rename to samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_async.py index e2783a9..fc31089 100644 --- a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_list_instances_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-memcache -# [START memcache_generated_memcache_v1_CloudMemcache_ListInstances_async] +# [START memcache_v1_generated_CloudMemcache_ListInstances_async] 
from google.cloud import memcache_v1 @@ -43,4 +43,4 @@ async def sample_list_instances(): async for response in page_result: print(response) -# [END memcache_generated_memcache_v1_CloudMemcache_ListInstances_async] +# [END memcache_v1_generated_CloudMemcache_ListInstances_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_list_instances_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_sync.py similarity index 90% rename from samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_list_instances_sync.py rename to samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_sync.py index dee17e7..6dc02f1 100644 --- a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_list_instances_sync.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-memcache -# [START memcache_generated_memcache_v1_CloudMemcache_ListInstances_sync] +# [START memcache_v1_generated_CloudMemcache_ListInstances_sync] from google.cloud import memcache_v1 @@ -43,4 +43,4 @@ def sample_list_instances(): for response in page_result: print(response) -# [END memcache_generated_memcache_v1_CloudMemcache_ListInstances_sync] +# [END memcache_v1_generated_CloudMemcache_ListInstances_sync] diff --git a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_update_instance_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_async.py similarity index 91% rename from samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_update_instance_async.py rename to samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_async.py index 6f0221e..eaa6444 100644 --- a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_update_instance_async.py +++ 
b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-memcache -# [START memcache_generated_memcache_v1_CloudMemcache_UpdateInstance_async] +# [START memcache_v1_generated_CloudMemcache_UpdateInstance_async] from google.cloud import memcache_v1 @@ -52,4 +52,4 @@ async def sample_update_instance(): # Handle the response print(response) -# [END memcache_generated_memcache_v1_CloudMemcache_UpdateInstance_async] +# [END memcache_v1_generated_CloudMemcache_UpdateInstance_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_update_instance_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_sync.py similarity index 91% rename from samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_update_instance_sync.py rename to samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_sync.py index 400eb92..124f384 100644 --- a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_update_instance_sync.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-memcache -# [START memcache_generated_memcache_v1_CloudMemcache_UpdateInstance_sync] +# [START memcache_v1_generated_CloudMemcache_UpdateInstance_sync] from google.cloud import memcache_v1 @@ -52,4 +52,4 @@ def sample_update_instance(): # Handle the response print(response) -# [END memcache_generated_memcache_v1_CloudMemcache_UpdateInstance_sync] +# [END memcache_v1_generated_CloudMemcache_UpdateInstance_sync] diff --git a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_update_parameters_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_async.py similarity index 90% rename from 
samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_update_parameters_async.py rename to samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_async.py index b9a51ba..09bc694 100644 --- a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_update_parameters_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-memcache -# [START memcache_generated_memcache_v1_CloudMemcache_UpdateParameters_async] +# [START memcache_v1_generated_CloudMemcache_UpdateParameters_async] from google.cloud import memcache_v1 @@ -46,4 +46,4 @@ async def sample_update_parameters(): # Handle the response print(response) -# [END memcache_generated_memcache_v1_CloudMemcache_UpdateParameters_async] +# [END memcache_v1_generated_CloudMemcache_UpdateParameters_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_update_parameters_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_sync.py similarity index 90% rename from samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_update_parameters_sync.py rename to samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_sync.py index ef9139e..019f107 100644 --- a/samples/generated_samples/memcache_generated_memcache_v1_cloud_memcache_update_parameters_sync.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-memcache -# [START memcache_generated_memcache_v1_CloudMemcache_UpdateParameters_sync] +# [START memcache_v1_generated_CloudMemcache_UpdateParameters_sync] from google.cloud import memcache_v1 @@ -46,4 +46,4 @@ def sample_update_parameters(): # Handle the response print(response) -# [END memcache_generated_memcache_v1_CloudMemcache_UpdateParameters_sync] +# 
[END memcache_v1_generated_CloudMemcache_UpdateParameters_sync] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_apply_parameters_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_async.py similarity index 89% rename from samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_apply_parameters_async.py rename to samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_async.py index 34cd90e..cf0e191 100644 --- a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_apply_parameters_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-memcache -# [START memcache_generated_memcache_v1beta2_CloudMemcache_ApplyParameters_async] +# [START memcache_v1beta2_generated_CloudMemcache_ApplyParameters_async] from google.cloud import memcache_v1beta2 @@ -46,4 +46,4 @@ async def sample_apply_parameters(): # Handle the response print(response) -# [END memcache_generated_memcache_v1beta2_CloudMemcache_ApplyParameters_async] +# [END memcache_v1beta2_generated_CloudMemcache_ApplyParameters_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_apply_parameters_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_sync.py similarity index 89% rename from samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_apply_parameters_sync.py rename to samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_sync.py index e380a23..9023c43 100644 --- a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_apply_parameters_sync.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install 
google-cloud-memcache -# [START memcache_generated_memcache_v1beta2_CloudMemcache_ApplyParameters_sync] +# [START memcache_v1beta2_generated_CloudMemcache_ApplyParameters_sync] from google.cloud import memcache_v1beta2 @@ -46,4 +46,4 @@ def sample_apply_parameters(): # Handle the response print(response) -# [END memcache_generated_memcache_v1beta2_CloudMemcache_ApplyParameters_sync] +# [END memcache_v1beta2_generated_CloudMemcache_ApplyParameters_sync] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_apply_software_update_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_async.py similarity index 89% rename from samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_apply_software_update_async.py rename to samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_async.py index c11bfcc..e1da1a4 100644 --- a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_apply_software_update_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-memcache -# [START memcache_generated_memcache_v1beta2_CloudMemcache_ApplySoftwareUpdate_async] +# [START memcache_v1beta2_generated_CloudMemcache_ApplySoftwareUpdate_async] from google.cloud import memcache_v1beta2 @@ -46,4 +46,4 @@ async def sample_apply_software_update(): # Handle the response print(response) -# [END memcache_generated_memcache_v1beta2_CloudMemcache_ApplySoftwareUpdate_async] +# [END memcache_v1beta2_generated_CloudMemcache_ApplySoftwareUpdate_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_apply_software_update_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_sync.py similarity index 89% rename from 
samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_apply_software_update_sync.py rename to samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_sync.py index 4b0d7a6..92a15da 100644 --- a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_apply_software_update_sync.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-memcache -# [START memcache_generated_memcache_v1beta2_CloudMemcache_ApplySoftwareUpdate_sync] +# [START memcache_v1beta2_generated_CloudMemcache_ApplySoftwareUpdate_sync] from google.cloud import memcache_v1beta2 @@ -46,4 +46,4 @@ def sample_apply_software_update(): # Handle the response print(response) -# [END memcache_generated_memcache_v1beta2_CloudMemcache_ApplySoftwareUpdate_sync] +# [END memcache_v1beta2_generated_CloudMemcache_ApplySoftwareUpdate_sync] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_create_instance_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_async.py similarity index 91% rename from samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_create_instance_async.py rename to samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_async.py index 20c42fe..e3fc3c9 100644 --- a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_create_instance_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-memcache -# [START memcache_generated_memcache_v1beta2_CloudMemcache_CreateInstance_async] +# [START memcache_v1beta2_generated_CloudMemcache_CreateInstance_async] from google.cloud import memcache_v1beta2 @@ -54,4 +54,4 @@ async def sample_create_instance(): # Handle the 
response print(response) -# [END memcache_generated_memcache_v1beta2_CloudMemcache_CreateInstance_async] +# [END memcache_v1beta2_generated_CloudMemcache_CreateInstance_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_create_instance_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_sync.py similarity index 91% rename from samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_create_instance_sync.py rename to samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_sync.py index 8eb697c..a6bff68 100644 --- a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_create_instance_sync.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-memcache -# [START memcache_generated_memcache_v1beta2_CloudMemcache_CreateInstance_sync] +# [START memcache_v1beta2_generated_CloudMemcache_CreateInstance_sync] from google.cloud import memcache_v1beta2 @@ -54,4 +54,4 @@ def sample_create_instance(): # Handle the response print(response) -# [END memcache_generated_memcache_v1beta2_CloudMemcache_CreateInstance_sync] +# [END memcache_v1beta2_generated_CloudMemcache_CreateInstance_sync] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_delete_instance_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_async.py similarity index 90% rename from samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_delete_instance_async.py rename to samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_async.py index d8223f9..4b33cae 100644 --- a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_delete_instance_async.py +++ 
b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-memcache -# [START memcache_generated_memcache_v1beta2_CloudMemcache_DeleteInstance_async] +# [START memcache_v1beta2_generated_CloudMemcache_DeleteInstance_async] from google.cloud import memcache_v1beta2 @@ -46,4 +46,4 @@ async def sample_delete_instance(): # Handle the response print(response) -# [END memcache_generated_memcache_v1beta2_CloudMemcache_DeleteInstance_async] +# [END memcache_v1beta2_generated_CloudMemcache_DeleteInstance_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_delete_instance_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_sync.py similarity index 90% rename from samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_delete_instance_sync.py rename to samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_sync.py index 59a9c10..353953f 100644 --- a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_delete_instance_sync.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-memcache -# [START memcache_generated_memcache_v1beta2_CloudMemcache_DeleteInstance_sync] +# [START memcache_v1beta2_generated_CloudMemcache_DeleteInstance_sync] from google.cloud import memcache_v1beta2 @@ -46,4 +46,4 @@ def sample_delete_instance(): # Handle the response print(response) -# [END memcache_generated_memcache_v1beta2_CloudMemcache_DeleteInstance_sync] +# [END memcache_v1beta2_generated_CloudMemcache_DeleteInstance_sync] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_get_instance_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_async.py similarity index 89% 
rename from samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_get_instance_async.py rename to samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_async.py index b72a0fa..0e238ae 100644 --- a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_get_instance_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-memcache -# [START memcache_generated_memcache_v1beta2_CloudMemcache_GetInstance_async] +# [START memcache_v1beta2_generated_CloudMemcache_GetInstance_async] from google.cloud import memcache_v1beta2 @@ -42,4 +42,4 @@ async def sample_get_instance(): # Handle the response print(response) -# [END memcache_generated_memcache_v1beta2_CloudMemcache_GetInstance_async] +# [END memcache_v1beta2_generated_CloudMemcache_GetInstance_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_get_instance_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_sync.py similarity index 89% rename from samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_get_instance_sync.py rename to samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_sync.py index e35c369..9931d68 100644 --- a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_get_instance_sync.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-memcache -# [START memcache_generated_memcache_v1beta2_CloudMemcache_GetInstance_sync] +# [START memcache_v1beta2_generated_CloudMemcache_GetInstance_sync] from google.cloud import memcache_v1beta2 @@ -42,4 +42,4 @@ def sample_get_instance(): # Handle the response print(response) -# [END 
memcache_generated_memcache_v1beta2_CloudMemcache_GetInstance_sync] +# [END memcache_v1beta2_generated_CloudMemcache_GetInstance_sync] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_list_instances_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_async.py similarity index 89% rename from samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_list_instances_async.py rename to samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_async.py index 2032768..0a07260 100644 --- a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_list_instances_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-memcache -# [START memcache_generated_memcache_v1beta2_CloudMemcache_ListInstances_async] +# [START memcache_v1beta2_generated_CloudMemcache_ListInstances_async] from google.cloud import memcache_v1beta2 @@ -43,4 +43,4 @@ async def sample_list_instances(): async for response in page_result: print(response) -# [END memcache_generated_memcache_v1beta2_CloudMemcache_ListInstances_async] +# [END memcache_v1beta2_generated_CloudMemcache_ListInstances_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_list_instances_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_sync.py similarity index 89% rename from samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_list_instances_sync.py rename to samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_sync.py index 4bdc296..5319d49 100644 --- a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_list_instances_sync.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_sync.py 
@@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-memcache -# [START memcache_generated_memcache_v1beta2_CloudMemcache_ListInstances_sync] +# [START memcache_v1beta2_generated_CloudMemcache_ListInstances_sync] from google.cloud import memcache_v1beta2 @@ -43,4 +43,4 @@ def sample_list_instances(): for response in page_result: print(response) -# [END memcache_generated_memcache_v1beta2_CloudMemcache_ListInstances_sync] +# [END memcache_v1beta2_generated_CloudMemcache_ListInstances_sync] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_update_instance_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_async.py similarity index 91% rename from samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_update_instance_async.py rename to samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_async.py index 09d0b9a..e1ee606 100644 --- a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_update_instance_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-memcache -# [START memcache_generated_memcache_v1beta2_CloudMemcache_UpdateInstance_async] +# [START memcache_v1beta2_generated_CloudMemcache_UpdateInstance_async] from google.cloud import memcache_v1beta2 @@ -52,4 +52,4 @@ async def sample_update_instance(): # Handle the response print(response) -# [END memcache_generated_memcache_v1beta2_CloudMemcache_UpdateInstance_async] +# [END memcache_v1beta2_generated_CloudMemcache_UpdateInstance_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_update_instance_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_sync.py similarity index 91% rename from 
samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_update_instance_sync.py rename to samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_sync.py index f1d3505..1e1b290 100644 --- a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_update_instance_sync.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-memcache -# [START memcache_generated_memcache_v1beta2_CloudMemcache_UpdateInstance_sync] +# [START memcache_v1beta2_generated_CloudMemcache_UpdateInstance_sync] from google.cloud import memcache_v1beta2 @@ -52,4 +52,4 @@ def sample_update_instance(): # Handle the response print(response) -# [END memcache_generated_memcache_v1beta2_CloudMemcache_UpdateInstance_sync] +# [END memcache_v1beta2_generated_CloudMemcache_UpdateInstance_sync] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_update_parameters_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_async.py similarity index 89% rename from samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_update_parameters_async.py rename to samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_async.py index df411b3..2a76234 100644 --- a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_update_parameters_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-memcache -# [START memcache_generated_memcache_v1beta2_CloudMemcache_UpdateParameters_async] +# [START memcache_v1beta2_generated_CloudMemcache_UpdateParameters_async] from google.cloud import memcache_v1beta2 @@ -46,4 +46,4 @@ async def sample_update_parameters(): # Handle the response print(response) -# [END 
memcache_generated_memcache_v1beta2_CloudMemcache_UpdateParameters_async] +# [END memcache_v1beta2_generated_CloudMemcache_UpdateParameters_async] diff --git a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_update_parameters_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_sync.py similarity index 89% rename from samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_update_parameters_sync.py rename to samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_sync.py index faebb21..d279127 100644 --- a/samples/generated_samples/memcache_generated_memcache_v1beta2_cloud_memcache_update_parameters_sync.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-memcache -# [START memcache_generated_memcache_v1beta2_CloudMemcache_UpdateParameters_sync] +# [START memcache_v1beta2_generated_CloudMemcache_UpdateParameters_sync] from google.cloud import memcache_v1beta2 @@ -46,4 +46,4 @@ def sample_update_parameters(): # Handle the response print(response) -# [END memcache_generated_memcache_v1beta2_CloudMemcache_UpdateParameters_sync] +# [END memcache_v1beta2_generated_CloudMemcache_UpdateParameters_sync] diff --git a/samples/generated_samples/snippet_metadata_memcache_v1.json b/samples/generated_samples/snippet_metadata_memcache_v1.json index 878a7c8..a8d58f6 100644 --- a/samples/generated_samples/snippet_metadata_memcache_v1.json +++ b/samples/generated_samples/snippet_metadata_memcache_v1.json @@ -10,8 +10,8 @@ "shortName": "ApplyParameters" } }, - "file": "memcache_generated_memcache_v1_cloud_memcache_apply_parameters_async.py", - "regionTag": "memcache_generated_memcache_v1_CloudMemcache_ApplyParameters_async", + "file": "memcache_v1_generated_cloud_memcache_apply_parameters_async.py", + "regionTag": 
"memcache_v1_generated_CloudMemcache_ApplyParameters_async", "segments": [ { "end": 48, @@ -54,8 +54,8 @@ "shortName": "ApplyParameters" } }, - "file": "memcache_generated_memcache_v1_cloud_memcache_apply_parameters_sync.py", - "regionTag": "memcache_generated_memcache_v1_CloudMemcache_ApplyParameters_sync", + "file": "memcache_v1_generated_cloud_memcache_apply_parameters_sync.py", + "regionTag": "memcache_v1_generated_CloudMemcache_ApplyParameters_sync", "segments": [ { "end": 48, @@ -99,8 +99,8 @@ "shortName": "CreateInstance" } }, - "file": "memcache_generated_memcache_v1_cloud_memcache_create_instance_async.py", - "regionTag": "memcache_generated_memcache_v1_CloudMemcache_CreateInstance_async", + "file": "memcache_v1_generated_cloud_memcache_create_instance_async.py", + "regionTag": "memcache_v1_generated_CloudMemcache_CreateInstance_async", "segments": [ { "end": 56, @@ -143,8 +143,8 @@ "shortName": "CreateInstance" } }, - "file": "memcache_generated_memcache_v1_cloud_memcache_create_instance_sync.py", - "regionTag": "memcache_generated_memcache_v1_CloudMemcache_CreateInstance_sync", + "file": "memcache_v1_generated_cloud_memcache_create_instance_sync.py", + "regionTag": "memcache_v1_generated_CloudMemcache_CreateInstance_sync", "segments": [ { "end": 56, @@ -188,8 +188,8 @@ "shortName": "DeleteInstance" } }, - "file": "memcache_generated_memcache_v1_cloud_memcache_delete_instance_async.py", - "regionTag": "memcache_generated_memcache_v1_CloudMemcache_DeleteInstance_async", + "file": "memcache_v1_generated_cloud_memcache_delete_instance_async.py", + "regionTag": "memcache_v1_generated_CloudMemcache_DeleteInstance_async", "segments": [ { "end": 48, @@ -232,8 +232,8 @@ "shortName": "DeleteInstance" } }, - "file": "memcache_generated_memcache_v1_cloud_memcache_delete_instance_sync.py", - "regionTag": "memcache_generated_memcache_v1_CloudMemcache_DeleteInstance_sync", + "file": "memcache_v1_generated_cloud_memcache_delete_instance_sync.py", + "regionTag": 
"memcache_v1_generated_CloudMemcache_DeleteInstance_sync", "segments": [ { "end": 48, @@ -277,8 +277,8 @@ "shortName": "GetInstance" } }, - "file": "memcache_generated_memcache_v1_cloud_memcache_get_instance_async.py", - "regionTag": "memcache_generated_memcache_v1_CloudMemcache_GetInstance_async", + "file": "memcache_v1_generated_cloud_memcache_get_instance_async.py", + "regionTag": "memcache_v1_generated_CloudMemcache_GetInstance_async", "segments": [ { "end": 44, @@ -321,8 +321,8 @@ "shortName": "GetInstance" } }, - "file": "memcache_generated_memcache_v1_cloud_memcache_get_instance_sync.py", - "regionTag": "memcache_generated_memcache_v1_CloudMemcache_GetInstance_sync", + "file": "memcache_v1_generated_cloud_memcache_get_instance_sync.py", + "regionTag": "memcache_v1_generated_CloudMemcache_GetInstance_sync", "segments": [ { "end": 44, @@ -366,8 +366,8 @@ "shortName": "ListInstances" } }, - "file": "memcache_generated_memcache_v1_cloud_memcache_list_instances_async.py", - "regionTag": "memcache_generated_memcache_v1_CloudMemcache_ListInstances_async", + "file": "memcache_v1_generated_cloud_memcache_list_instances_async.py", + "regionTag": "memcache_v1_generated_CloudMemcache_ListInstances_async", "segments": [ { "end": 45, @@ -410,8 +410,8 @@ "shortName": "ListInstances" } }, - "file": "memcache_generated_memcache_v1_cloud_memcache_list_instances_sync.py", - "regionTag": "memcache_generated_memcache_v1_CloudMemcache_ListInstances_sync", + "file": "memcache_v1_generated_cloud_memcache_list_instances_sync.py", + "regionTag": "memcache_v1_generated_CloudMemcache_ListInstances_sync", "segments": [ { "end": 45, @@ -455,8 +455,8 @@ "shortName": "UpdateInstance" } }, - "file": "memcache_generated_memcache_v1_cloud_memcache_update_instance_async.py", - "regionTag": "memcache_generated_memcache_v1_CloudMemcache_UpdateInstance_async", + "file": "memcache_v1_generated_cloud_memcache_update_instance_async.py", + "regionTag": 
"memcache_v1_generated_CloudMemcache_UpdateInstance_async", "segments": [ { "end": 54, @@ -499,8 +499,8 @@ "shortName": "UpdateInstance" } }, - "file": "memcache_generated_memcache_v1_cloud_memcache_update_instance_sync.py", - "regionTag": "memcache_generated_memcache_v1_CloudMemcache_UpdateInstance_sync", + "file": "memcache_v1_generated_cloud_memcache_update_instance_sync.py", + "regionTag": "memcache_v1_generated_CloudMemcache_UpdateInstance_sync", "segments": [ { "end": 54, @@ -544,8 +544,8 @@ "shortName": "UpdateParameters" } }, - "file": "memcache_generated_memcache_v1_cloud_memcache_update_parameters_async.py", - "regionTag": "memcache_generated_memcache_v1_CloudMemcache_UpdateParameters_async", + "file": "memcache_v1_generated_cloud_memcache_update_parameters_async.py", + "regionTag": "memcache_v1_generated_CloudMemcache_UpdateParameters_async", "segments": [ { "end": 48, @@ -588,8 +588,8 @@ "shortName": "UpdateParameters" } }, - "file": "memcache_generated_memcache_v1_cloud_memcache_update_parameters_sync.py", - "regionTag": "memcache_generated_memcache_v1_CloudMemcache_UpdateParameters_sync", + "file": "memcache_v1_generated_cloud_memcache_update_parameters_sync.py", + "regionTag": "memcache_v1_generated_CloudMemcache_UpdateParameters_sync", "segments": [ { "end": 48, diff --git a/samples/generated_samples/snippet_metadata_memcache_v1beta2.json b/samples/generated_samples/snippet_metadata_memcache_v1beta2.json index b0cdb27..a62f8ef 100644 --- a/samples/generated_samples/snippet_metadata_memcache_v1beta2.json +++ b/samples/generated_samples/snippet_metadata_memcache_v1beta2.json @@ -10,8 +10,8 @@ "shortName": "ApplyParameters" } }, - "file": "memcache_generated_memcache_v1beta2_cloud_memcache_apply_parameters_async.py", - "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_ApplyParameters_async", + "file": "memcache_v1beta2_generated_cloud_memcache_apply_parameters_async.py", + "regionTag": 
"memcache_v1beta2_generated_CloudMemcache_ApplyParameters_async", "segments": [ { "end": 48, @@ -54,8 +54,8 @@ "shortName": "ApplyParameters" } }, - "file": "memcache_generated_memcache_v1beta2_cloud_memcache_apply_parameters_sync.py", - "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_ApplyParameters_sync", + "file": "memcache_v1beta2_generated_cloud_memcache_apply_parameters_sync.py", + "regionTag": "memcache_v1beta2_generated_CloudMemcache_ApplyParameters_sync", "segments": [ { "end": 48, @@ -99,8 +99,8 @@ "shortName": "ApplySoftwareUpdate" } }, - "file": "memcache_generated_memcache_v1beta2_cloud_memcache_apply_software_update_async.py", - "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_ApplySoftwareUpdate_async", + "file": "memcache_v1beta2_generated_cloud_memcache_apply_software_update_async.py", + "regionTag": "memcache_v1beta2_generated_CloudMemcache_ApplySoftwareUpdate_async", "segments": [ { "end": 48, @@ -143,8 +143,8 @@ "shortName": "ApplySoftwareUpdate" } }, - "file": "memcache_generated_memcache_v1beta2_cloud_memcache_apply_software_update_sync.py", - "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_ApplySoftwareUpdate_sync", + "file": "memcache_v1beta2_generated_cloud_memcache_apply_software_update_sync.py", + "regionTag": "memcache_v1beta2_generated_CloudMemcache_ApplySoftwareUpdate_sync", "segments": [ { "end": 48, @@ -188,8 +188,8 @@ "shortName": "CreateInstance" } }, - "file": "memcache_generated_memcache_v1beta2_cloud_memcache_create_instance_async.py", - "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_CreateInstance_async", + "file": "memcache_v1beta2_generated_cloud_memcache_create_instance_async.py", + "regionTag": "memcache_v1beta2_generated_CloudMemcache_CreateInstance_async", "segments": [ { "end": 56, @@ -232,8 +232,8 @@ "shortName": "CreateInstance" } }, - "file": "memcache_generated_memcache_v1beta2_cloud_memcache_create_instance_sync.py", - "regionTag": 
"memcache_generated_memcache_v1beta2_CloudMemcache_CreateInstance_sync", + "file": "memcache_v1beta2_generated_cloud_memcache_create_instance_sync.py", + "regionTag": "memcache_v1beta2_generated_CloudMemcache_CreateInstance_sync", "segments": [ { "end": 56, @@ -277,8 +277,8 @@ "shortName": "DeleteInstance" } }, - "file": "memcache_generated_memcache_v1beta2_cloud_memcache_delete_instance_async.py", - "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_DeleteInstance_async", + "file": "memcache_v1beta2_generated_cloud_memcache_delete_instance_async.py", + "regionTag": "memcache_v1beta2_generated_CloudMemcache_DeleteInstance_async", "segments": [ { "end": 48, @@ -321,8 +321,8 @@ "shortName": "DeleteInstance" } }, - "file": "memcache_generated_memcache_v1beta2_cloud_memcache_delete_instance_sync.py", - "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_DeleteInstance_sync", + "file": "memcache_v1beta2_generated_cloud_memcache_delete_instance_sync.py", + "regionTag": "memcache_v1beta2_generated_CloudMemcache_DeleteInstance_sync", "segments": [ { "end": 48, @@ -366,8 +366,8 @@ "shortName": "GetInstance" } }, - "file": "memcache_generated_memcache_v1beta2_cloud_memcache_get_instance_async.py", - "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_GetInstance_async", + "file": "memcache_v1beta2_generated_cloud_memcache_get_instance_async.py", + "regionTag": "memcache_v1beta2_generated_CloudMemcache_GetInstance_async", "segments": [ { "end": 44, @@ -410,8 +410,8 @@ "shortName": "GetInstance" } }, - "file": "memcache_generated_memcache_v1beta2_cloud_memcache_get_instance_sync.py", - "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_GetInstance_sync", + "file": "memcache_v1beta2_generated_cloud_memcache_get_instance_sync.py", + "regionTag": "memcache_v1beta2_generated_CloudMemcache_GetInstance_sync", "segments": [ { "end": 44, @@ -455,8 +455,8 @@ "shortName": "ListInstances" } }, - "file": 
"memcache_generated_memcache_v1beta2_cloud_memcache_list_instances_async.py", - "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_ListInstances_async", + "file": "memcache_v1beta2_generated_cloud_memcache_list_instances_async.py", + "regionTag": "memcache_v1beta2_generated_CloudMemcache_ListInstances_async", "segments": [ { "end": 45, @@ -499,8 +499,8 @@ "shortName": "ListInstances" } }, - "file": "memcache_generated_memcache_v1beta2_cloud_memcache_list_instances_sync.py", - "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_ListInstances_sync", + "file": "memcache_v1beta2_generated_cloud_memcache_list_instances_sync.py", + "regionTag": "memcache_v1beta2_generated_CloudMemcache_ListInstances_sync", "segments": [ { "end": 45, @@ -544,8 +544,8 @@ "shortName": "UpdateInstance" } }, - "file": "memcache_generated_memcache_v1beta2_cloud_memcache_update_instance_async.py", - "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_UpdateInstance_async", + "file": "memcache_v1beta2_generated_cloud_memcache_update_instance_async.py", + "regionTag": "memcache_v1beta2_generated_CloudMemcache_UpdateInstance_async", "segments": [ { "end": 54, @@ -588,8 +588,8 @@ "shortName": "UpdateInstance" } }, - "file": "memcache_generated_memcache_v1beta2_cloud_memcache_update_instance_sync.py", - "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_UpdateInstance_sync", + "file": "memcache_v1beta2_generated_cloud_memcache_update_instance_sync.py", + "regionTag": "memcache_v1beta2_generated_CloudMemcache_UpdateInstance_sync", "segments": [ { "end": 54, @@ -633,8 +633,8 @@ "shortName": "UpdateParameters" } }, - "file": "memcache_generated_memcache_v1beta2_cloud_memcache_update_parameters_async.py", - "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_UpdateParameters_async", + "file": "memcache_v1beta2_generated_cloud_memcache_update_parameters_async.py", + "regionTag": 
"memcache_v1beta2_generated_CloudMemcache_UpdateParameters_async", "segments": [ { "end": 48, @@ -677,8 +677,8 @@ "shortName": "UpdateParameters" } }, - "file": "memcache_generated_memcache_v1beta2_cloud_memcache_update_parameters_sync.py", - "regionTag": "memcache_generated_memcache_v1beta2_CloudMemcache_UpdateParameters_sync", + "file": "memcache_v1beta2_generated_cloud_memcache_update_parameters_sync.py", + "regionTag": "memcache_v1beta2_generated_CloudMemcache_UpdateParameters_sync", "segments": [ { "end": 48, From 0a3f92c74df65998c59dab3f74d5e4bebc396ef3 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 26 Feb 2022 05:50:21 -0500 Subject: [PATCH 083/159] chore: update copyright year to 2022 (#142) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update copyright year to 2022 PiperOrigin-RevId: 431037888 Source-Link: https://github.com/googleapis/googleapis/commit/b3397f5febbf21dfc69b875ddabaf76bee765058 Source-Link: https://github.com/googleapis/googleapis-gen/commit/510b54e1cdefd53173984df16645081308fe897e Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTEwYjU0ZTFjZGVmZDUzMTczOTg0ZGYxNjY0NTA4MTMwOGZlODk3ZSJ9 * 🩉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- google/cloud/memcache/__init__.py | 2 +- google/cloud/memcache_v1/__init__.py | 2 +- google/cloud/memcache_v1/services/__init__.py | 2 +- google/cloud/memcache_v1/services/cloud_memcache/__init__.py | 2 +- .../cloud/memcache_v1/services/cloud_memcache/async_client.py | 2 +- google/cloud/memcache_v1/services/cloud_memcache/client.py | 2 +- google/cloud/memcache_v1/services/cloud_memcache/pagers.py | 2 +- .../memcache_v1/services/cloud_memcache/transports/__init__.py | 2 +- .../memcache_v1/services/cloud_memcache/transports/base.py | 2 +- 
.../memcache_v1/services/cloud_memcache/transports/grpc.py | 2 +- .../services/cloud_memcache/transports/grpc_asyncio.py | 2 +- google/cloud/memcache_v1/types/__init__.py | 2 +- google/cloud/memcache_v1/types/cloud_memcache.py | 2 +- google/cloud/memcache_v1beta2/__init__.py | 2 +- google/cloud/memcache_v1beta2/services/__init__.py | 2 +- .../cloud/memcache_v1beta2/services/cloud_memcache/__init__.py | 2 +- .../memcache_v1beta2/services/cloud_memcache/async_client.py | 2 +- google/cloud/memcache_v1beta2/services/cloud_memcache/client.py | 2 +- google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py | 2 +- .../services/cloud_memcache/transports/__init__.py | 2 +- .../memcache_v1beta2/services/cloud_memcache/transports/base.py | 2 +- .../memcache_v1beta2/services/cloud_memcache/transports/grpc.py | 2 +- .../services/cloud_memcache/transports/grpc_asyncio.py | 2 +- google/cloud/memcache_v1beta2/types/__init__.py | 2 +- google/cloud/memcache_v1beta2/types/cloud_memcache.py | 2 +- ...mcache_v1_generated_cloud_memcache_apply_parameters_async.py | 2 +- ...emcache_v1_generated_cloud_memcache_apply_parameters_sync.py | 2 +- ...emcache_v1_generated_cloud_memcache_create_instance_async.py | 2 +- ...memcache_v1_generated_cloud_memcache_create_instance_sync.py | 2 +- ...emcache_v1_generated_cloud_memcache_delete_instance_async.py | 2 +- ...memcache_v1_generated_cloud_memcache_delete_instance_sync.py | 2 +- .../memcache_v1_generated_cloud_memcache_get_instance_async.py | 2 +- .../memcache_v1_generated_cloud_memcache_get_instance_sync.py | 2 +- ...memcache_v1_generated_cloud_memcache_list_instances_async.py | 2 +- .../memcache_v1_generated_cloud_memcache_list_instances_sync.py | 2 +- ...emcache_v1_generated_cloud_memcache_update_instance_async.py | 2 +- ...memcache_v1_generated_cloud_memcache_update_instance_sync.py | 2 +- ...cache_v1_generated_cloud_memcache_update_parameters_async.py | 2 +- ...mcache_v1_generated_cloud_memcache_update_parameters_sync.py | 2 +- 
...e_v1beta2_generated_cloud_memcache_apply_parameters_async.py | 2 +- ...he_v1beta2_generated_cloud_memcache_apply_parameters_sync.py | 2 +- ...eta2_generated_cloud_memcache_apply_software_update_async.py | 2 +- ...beta2_generated_cloud_memcache_apply_software_update_sync.py | 2 +- ...he_v1beta2_generated_cloud_memcache_create_instance_async.py | 2 +- ...che_v1beta2_generated_cloud_memcache_create_instance_sync.py | 2 +- ...he_v1beta2_generated_cloud_memcache_delete_instance_async.py | 2 +- ...che_v1beta2_generated_cloud_memcache_delete_instance_sync.py | 2 +- ...cache_v1beta2_generated_cloud_memcache_get_instance_async.py | 2 +- ...mcache_v1beta2_generated_cloud_memcache_get_instance_sync.py | 2 +- ...che_v1beta2_generated_cloud_memcache_list_instances_async.py | 2 +- ...ache_v1beta2_generated_cloud_memcache_list_instances_sync.py | 2 +- ...he_v1beta2_generated_cloud_memcache_update_instance_async.py | 2 +- ...che_v1beta2_generated_cloud_memcache_update_instance_sync.py | 2 +- ..._v1beta2_generated_cloud_memcache_update_parameters_async.py | 2 +- ...e_v1beta2_generated_cloud_memcache_update_parameters_sync.py | 2 +- scripts/fixup_memcache_v1_keywords.py | 2 +- scripts/fixup_memcache_v1beta2_keywords.py | 2 +- tests/__init__.py | 2 +- tests/unit/__init__.py | 2 +- tests/unit/gapic/__init__.py | 2 +- tests/unit/gapic/memcache_v1/__init__.py | 2 +- tests/unit/gapic/memcache_v1/test_cloud_memcache.py | 2 +- tests/unit/gapic/memcache_v1beta2/__init__.py | 2 +- tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py | 2 +- 64 files changed, 64 insertions(+), 64 deletions(-) diff --git a/google/cloud/memcache/__init__.py b/google/cloud/memcache/__init__.py index f2b7c14..9c33853 100644 --- a/google/cloud/memcache/__init__.py +++ b/google/cloud/memcache/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in 
compliance with the License. diff --git a/google/cloud/memcache_v1/__init__.py b/google/cloud/memcache_v1/__init__.py index 7d3016d..02277b0 100644 --- a/google/cloud/memcache_v1/__init__.py +++ b/google/cloud/memcache_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/memcache_v1/services/__init__.py b/google/cloud/memcache_v1/services/__init__.py index 4de6597..e8e1c38 100644 --- a/google/cloud/memcache_v1/services/__init__.py +++ b/google/cloud/memcache_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/memcache_v1/services/cloud_memcache/__init__.py b/google/cloud/memcache_v1/services/cloud_memcache/__init__.py index efb245e..64e72f6 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/__init__.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py index 9025aa5..841b3c7 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py index 8079b95..ad87e51 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/memcache_v1/services/cloud_memcache/pagers.py b/google/cloud/memcache_v1/services/cloud_memcache/pagers.py index 68e9242..45f1d9a 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/pagers.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py index 32ad848..71932bd 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py index de1431d..1be13f1 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py index 531a3a5..52a92dc 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py index d23dfaf..26de320 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/memcache_v1/types/__init__.py b/google/cloud/memcache_v1/types/__init__.py index 2430991..7c57b55 100644 --- a/google/cloud/memcache_v1/types/__init__.py +++ b/google/cloud/memcache_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/memcache_v1/types/cloud_memcache.py b/google/cloud/memcache_v1/types/cloud_memcache.py index 065ae9c..a66f713 100644 --- a/google/cloud/memcache_v1/types/cloud_memcache.py +++ b/google/cloud/memcache_v1/types/cloud_memcache.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/memcache_v1beta2/__init__.py b/google/cloud/memcache_v1beta2/__init__.py index 54fa8a3..9ad9112 100644 --- a/google/cloud/memcache_v1beta2/__init__.py +++ b/google/cloud/memcache_v1beta2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/memcache_v1beta2/services/__init__.py b/google/cloud/memcache_v1beta2/services/__init__.py index 4de6597..e8e1c38 100644 --- a/google/cloud/memcache_v1beta2/services/__init__.py +++ b/google/cloud/memcache_v1beta2/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py index efb245e..64e72f6 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py index 6a8401c..b8c094f 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index 12232c5..c00cab0 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py index 617b0a7..30c5652 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py index 32ad848..71932bd 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py index ac4e951..ef164f4 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py index b12ba06..6a163ad 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py index d81422c..f5a6a6b 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/memcache_v1beta2/types/__init__.py b/google/cloud/memcache_v1beta2/types/__init__.py index a4e788a..005a5bd 100644 --- a/google/cloud/memcache_v1beta2/types/__init__.py +++ b/google/cloud/memcache_v1beta2/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/memcache_v1beta2/types/cloud_memcache.py b/google/cloud/memcache_v1beta2/types/cloud_memcache.py index 31be119..dbdf720 100644 --- a/google/cloud/memcache_v1beta2/types/cloud_memcache.py +++ b/google/cloud/memcache_v1beta2/types/cloud_memcache.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_async.py index 34033e5..dccf400 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_sync.py index 994b73c..f6a6bd9 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_sync.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_async.py index 3830084..44f2f0b 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_sync.py index ab28595..3be4489 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_sync.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_async.py index 4a95e1d..754d26f 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_sync.py index e82377e..767510e 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_sync.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_async.py index 2c61825..f562215 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_sync.py index fd70c0f..c470181 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_sync.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_async.py index fc31089..7516120 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_sync.py index 6dc02f1..c5ee139 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_sync.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_async.py index eaa6444..e078aeb 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_sync.py index 124f384..711c064 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_sync.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_async.py index 09bc694..9020743 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_sync.py index 019f107..4044531 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_sync.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_async.py index cf0e191..eef58f6 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_sync.py index 9023c43..159a37c 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_sync.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_async.py index e1da1a4..7cdcda9 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_sync.py index 92a15da..f3d7fef 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_sync.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_async.py index e3fc3c9..aa17117 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_sync.py index a6bff68..f1ec09e 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_sync.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_async.py index 4b33cae..f7a715b 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_sync.py index 353953f..497447a 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_sync.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_async.py index 0e238ae..a17c364 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_sync.py index 9931d68..aa391b1 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_sync.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_async.py index 0a07260..860b2f0 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_sync.py index 5319d49..a4d39a4 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_sync.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_async.py index e1ee606..a7d90c8 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_sync.py index 1e1b290..e3e4e0c 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_sync.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_async.py index 2a76234..6ddba29 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_sync.py index d279127..bea149a 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_sync.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/scripts/fixup_memcache_v1_keywords.py b/scripts/fixup_memcache_v1_keywords.py index 4acdee8..84308d4 100644 --- a/scripts/fixup_memcache_v1_keywords.py +++ b/scripts/fixup_memcache_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/scripts/fixup_memcache_v1beta2_keywords.py b/scripts/fixup_memcache_v1beta2_keywords.py index 59d2e11..4b5aa32 100644 --- a/scripts/fixup_memcache_v1beta2_keywords.py +++ b/scripts/fixup_memcache_v1beta2_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/__init__.py b/tests/__init__.py index 4de6597..e8e1c38 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py index 4de6597..e8e1c38 100644 --- a/tests/unit/__init__.py +++ b/tests/unit/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py index 4de6597..e8e1c38 100644 --- a/tests/unit/gapic/__init__.py +++ b/tests/unit/gapic/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/memcache_v1/__init__.py b/tests/unit/gapic/memcache_v1/__init__.py index 4de6597..e8e1c38 100644 --- a/tests/unit/gapic/memcache_v1/__init__.py +++ b/tests/unit/gapic/memcache_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py index 9be0c84..774fd46 100644 --- a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/memcache_v1beta2/__init__.py b/tests/unit/gapic/memcache_v1beta2/__init__.py index 4de6597..e8e1c38 100644 --- a/tests/unit/gapic/memcache_v1beta2/__init__.py +++ b/tests/unit/gapic/memcache_v1beta2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index cdafb55..0adabce 100644 --- a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From 6c31dea5a0ef8a5dc4f5262feccdaa38a01dc400 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 28 Feb 2022 17:36:35 -0500 Subject: [PATCH 084/159] chore(main): release 1.3.0 (#136) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 12 ++++++++++++ setup.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 124c154..2b34cdf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,17 @@ # Changelog +## [1.3.0](https://github.com/googleapis/python-memcache/compare/v1.2.1...v1.3.0) (2022-02-26) + + +### Features + +* add api key support ([#135](https://github.com/googleapis/python-memcache/issues/135)) ([ef5104e](https://github.com/googleapis/python-memcache/commit/ef5104e0922d980c0023b65665f29f27c14cddcc)) + + +### Bug Fixes + +* resolve DuplicateCredentialArgs error when using credentials_file ([5f8a2b4](https://github.com/googleapis/python-memcache/commit/5f8a2b4fe5fcc0c4a2be6b9f8529f4ceacbf6421)) + ### [1.2.1](https://www.github.com/googleapis/python-memcache/compare/v1.2.0...v1.2.1) (2021-11-01) diff --git a/setup.py b/setup.py index c23eb67..456e542 100644 --- a/setup.py +++ b/setup.py @@ -19,7 +19,7 @@ import os import setuptools # type: ignore -version = "1.2.1" +version = "1.3.0" package_root = os.path.abspath(os.path.dirname(__file__)) From 
de934a234c8e2e4c415ecdc99708a288e4212c5e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 1 Mar 2022 12:28:33 +0000 Subject: [PATCH 085/159] chore(deps): update actions/setup-python action to v3 (#145) Source-Link: https://github.com/googleapis/synthtool/commit/571ee2c3b26182429eddcf115122ee545d7d3787 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:660abdf857d3ab9aabcd967c163c70e657fcc5653595c709263af5f3fa23ef67 --- .github/.OwlBot.lock.yaml | 3 +-- .github/workflows/docs.yml | 4 ++-- .github/workflows/lint.yml | 2 +- .github/workflows/unittest.yml | 4 ++-- 4 files changed, 6 insertions(+), 7 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index b668c04..d9a55fa 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ed1f9983d5a935a89fe8085e8bb97d94e41015252c5b6c9771257cf8624367e6 - + digest: sha256:660abdf857d3ab9aabcd967c163c70e657fcc5653595c709263af5f3fa23ef67 diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index f7b8344..cca4e98 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -10,7 +10,7 @@ jobs: - name: Checkout uses: actions/checkout@v2 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.10" - name: Install nox @@ -26,7 +26,7 @@ jobs: - name: Checkout uses: actions/checkout@v2 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.10" - name: Install nox diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 1e8b05c..f687324 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -10,7 +10,7 @@ jobs: - name: Checkout uses: actions/checkout@v2 - name: Setup Python - uses: 
actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.10" - name: Install nox diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index 074ee25..d3003e0 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -13,7 +13,7 @@ jobs: - name: Checkout uses: actions/checkout@v2 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: ${{ matrix.python }} - name: Install nox @@ -39,7 +39,7 @@ jobs: - name: Checkout uses: actions/checkout@v2 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.10" - name: Install coverage From ba3fc7bbc974c224fb544ff324747371ccc13015 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 3 Mar 2022 00:38:26 +0000 Subject: [PATCH 086/159] chore(deps): update actions/checkout action to v3 (#147) Source-Link: https://github.com/googleapis/synthtool/commit/ca879097772aeec2cbb971c3cea8ecc81522b68a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:6162c384d685c5fe22521d3f37f6fc732bf99a085f6d47b677dbcae97fc21392 --- .github/.OwlBot.lock.yaml | 2 +- .github/workflows/docs.yml | 4 ++-- .github/workflows/lint.yml | 2 +- .github/workflows/unittest.yml | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index d9a55fa..480226a 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:660abdf857d3ab9aabcd967c163c70e657fcc5653595c709263af5f3fa23ef67 + digest: sha256:6162c384d685c5fe22521d3f37f6fc732bf99a085f6d47b677dbcae97fc21392 diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index cca4e98..b46d730 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Python uses: actions/setup-python@v3 with: @@ -24,7 +24,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Python uses: actions/setup-python@v3 with: diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index f687324..f512a49 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Python uses: actions/setup-python@v3 with: diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index d3003e0..e87fe5b 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -11,7 +11,7 @@ jobs: python: ['3.6', '3.7', '3.8', '3.9', '3.10'] steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Python uses: actions/setup-python@v3 with: @@ -37,7 +37,7 @@ jobs: - unit steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Python uses: actions/setup-python@v3 with: From fba130344bb07512d8fc0355c2c2da158d9be8ff Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 4 Mar 2022 11:46:07 -0500 Subject: [PATCH 087/159] fix(deps): require google-api-core>=1.31.5, >=2.3.2 (#150) fix(deps): require proto-plus>=1.15.0 --- setup.py | 4 ++-- testing/constraints-3.6.txt | 2 +- 2 files 
changed, 3 insertions(+), 3 deletions(-) diff --git a/setup.py b/setup.py index 456e542..e984d09 100644 --- a/setup.py +++ b/setup.py @@ -43,8 +43,8 @@ # NOTE: Maintainers, please do not require google-api-core>=2.x.x # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 - "google-api-core[grpc] >= 1.28.0, <3.0.0dev", - "proto-plus >= 1.4.0", + "google-api-core[grpc] >= 1.31.5, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0", + "proto-plus >= 1.15.0", ), python_requires=">=3.6", classifiers=[ diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index 1e3ec8b..be5a64f 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -4,5 +4,5 @@ # Pin the version to the lower bound. # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.28.0 +google-api-core==1.31.5 proto-plus==1.15.0 From 44669a39c707dda1d837bfe2b77724b90c370b80 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 4 Mar 2022 19:39:17 -0500 Subject: [PATCH 088/159] chore(deps): update actions/download-artifact action to v3 (#152) Source-Link: https://github.com/googleapis/synthtool/commit/38e11ad1104dcc1e63b52691ddf2fe4015d06955 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:4e1991042fe54b991db9ca17c8fb386e61b22fe4d1472a568bf0fcac85dcf5d3 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 2 +- .github/workflows/unittest.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 480226a..44c78f7 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:6162c384d685c5fe22521d3f37f6fc732bf99a085f6d47b677dbcae97fc21392 + digest: sha256:4e1991042fe54b991db9ca17c8fb386e61b22fe4d1472a568bf0fcac85dcf5d3 diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index e87fe5b..e5be6ed 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -26,7 +26,7 @@ jobs: run: | nox -s unit-${{ matrix.python }} - name: Upload coverage results - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage-artifacts path: .coverage-${{ matrix.python }} @@ -47,7 +47,7 @@ jobs: python -m pip install --upgrade setuptools pip wheel python -m pip install coverage - name: Download coverage results - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v3 with: name: coverage-artifacts path: .coverage-results/ From bdedbfc0ec40f03cab201dd9f925fcafce7df7aa Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 7 Mar 2022 10:56:28 -0500 Subject: [PATCH 089/159] chore(main): release 1.3.1 (#151) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 8 ++++++++ setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2b34cdf..21c658b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +### [1.3.1](https://github.com/googleapis/python-memcache/compare/v1.3.0...v1.3.1) (2022-03-05) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#150](https://github.com/googleapis/python-memcache/issues/150)) ([fba1303](https://github.com/googleapis/python-memcache/commit/fba130344bb07512d8fc0355c2c2da158d9be8ff)) +* **deps:** require proto-plus>=1.15.0 ([fba1303](https://github.com/googleapis/python-memcache/commit/fba130344bb07512d8fc0355c2c2da158d9be8ff)) + ## 
[1.3.0](https://github.com/googleapis/python-memcache/compare/v1.2.1...v1.3.0) (2022-02-26) diff --git a/setup.py b/setup.py index e984d09..bdf4c8f 100644 --- a/setup.py +++ b/setup.py @@ -19,7 +19,7 @@ import os import setuptools # type: ignore -version = "1.3.0" +version = "1.3.1" package_root = os.path.abspath(os.path.dirname(__file__)) From 1268f51e5853cab04109044e96a7e318c16f313b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 23 Mar 2022 11:37:24 -0600 Subject: [PATCH 090/159] chore(python): configure release-please on previous major versions (#153) Source-Link: https://github.com/googleapis/synthtool/commit/c1dd87e9287f8de99930d3046dd555c4d03384c6 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:2d13c2172a5d6129c861edaa48b60ead15aeaf58aa75e02d870c4cbdfa63aaba Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 2 +- .github/release-please.yml | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 44c78f7..8807627 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:4e1991042fe54b991db9ca17c8fb386e61b22fe4d1472a568bf0fcac85dcf5d3 + digest: sha256:2d13c2172a5d6129c861edaa48b60ead15aeaf58aa75e02d870c4cbdfa63aaba diff --git a/.github/release-please.yml b/.github/release-please.yml index 466597e..6def37a 100644 --- a/.github/release-please.yml +++ b/.github/release-please.yml @@ -1,2 +1,8 @@ releaseType: python handleGHRelease: true +# NOTE: this section is generated by synthtool.languages.python +# See https://github.com/googleapis/synthtool/blob/master/synthtool/languages/python.py +branches: +- branch: v0 + handleGHRelease: true + releaseType: python From 05e82256627f8877e946511c07f4fb870f5a80b2 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 28 Mar 2022 19:51:04 -0400 Subject: [PATCH 091/159] chore(python): use black==22.3.0 (#157) Source-Link: https://github.com/googleapis/synthtool/commit/6fab84af09f2cf89a031fd8671d1def6b2931b11 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:7cffbc10910c3ab1b852c05114a08d374c195a81cdec1d4a67a1d129331d0bfe Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 2 +- docs/conf.py | 5 +- .../services/cloud_memcache/async_client.py | 58 +- .../services/cloud_memcache/client.py | 109 +++- .../cloud_memcache/transports/base.py | 38 +- .../cloud_memcache/transports/grpc.py | 3 +- .../cloud/memcache_v1/types/cloud_memcache.py | 293 ++++++++-- .../services/cloud_memcache/async_client.py | 65 ++- .../services/cloud_memcache/client.py | 116 +++- .../cloud_memcache/transports/base.py | 38 +- .../cloud_memcache/transports/grpc.py | 3 +- .../memcache_v1beta2/types/cloud_memcache.py | 326 ++++++++--- noxfile.py | 9 +- .../gapic/memcache_v1/test_cloud_memcache.py | 495 ++++++++++++---- .../memcache_v1beta2/test_cloud_memcache.py | 540 +++++++++++++----- 15 files changed, 1630 insertions(+), 470 deletions(-) diff --git 
a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 8807627..87dd006 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2d13c2172a5d6129c861edaa48b60ead15aeaf58aa75e02d870c4cbdfa63aaba + digest: sha256:7cffbc10910c3ab1b852c05114a08d374c195a81cdec1d4a67a1d129331d0bfe diff --git a/docs/conf.py b/docs/conf.py index 989f4ee..7e01871 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -361,7 +361,10 @@ intersphinx_mapping = { "python": ("https://python.readthedocs.org/en/latest/", None), "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), diff --git a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py index 841b3c7..19d6158 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py @@ -317,12 +317,20 @@ def sample_list_instances(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. 
response = pagers.ListInstancesAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -413,7 +421,12 @@ def sample_get_instance(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -547,7 +560,12 @@ def sample_create_instance(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -672,7 +690,12 @@ def sample_update_instance(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -799,7 +822,12 @@ def sample_update_parameters(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -916,7 +944,12 @@ def sample_delete_instance(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -1048,7 +1081,12 @@ def sample_apply_parameters(): ) # Send the request. 
- response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -1070,7 +1108,9 @@ async def __aexit__(self, exc_type, exc, tb): try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-memcache",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-memcache", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py index ad87e51..180b4ba 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -58,7 +58,10 @@ class CloudMemcacheClientMeta(type): _transport_registry["grpc"] = CloudMemcacheGrpcTransport _transport_registry["grpc_asyncio"] = CloudMemcacheGrpcAsyncIOTransport - def get_transport_class(cls, label: str = None,) -> Type[CloudMemcacheTransport]: + def get_transport_class( + cls, + label: str = None, + ) -> Type[CloudMemcacheTransport]: """Returns an appropriate transport class. 
Args: @@ -182,10 +185,16 @@ def transport(self) -> CloudMemcacheTransport: return self._transport @staticmethod - def instance_path(project: str, location: str, instance: str,) -> str: + def instance_path( + project: str, + location: str, + instance: str, + ) -> str: """Returns a fully-qualified instance string.""" return "projects/{project}/locations/{location}/instances/{instance}".format( - project=project, location=location, instance=instance, + project=project, + location=location, + instance=instance, ) @staticmethod @@ -198,7 +207,9 @@ def parse_instance_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str,) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -211,9 +222,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str,) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: @@ -222,9 +237,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str,) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -233,9 +252,13 @@ def parse_common_organization_path(path: str) 
-> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str,) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project,) + return "projects/{project}".format( + project=project, + ) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: @@ -244,10 +267,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str,) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) @staticmethod @@ -512,12 +539,20 @@ def sample_list_instances(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListInstancesPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -608,7 +643,12 @@ def sample_get_instance(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -742,7 +782,12 @@ def sample_create_instance(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. 
response = operation.from_gapic( @@ -867,7 +912,12 @@ def sample_update_instance(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation.from_gapic( @@ -994,7 +1044,12 @@ def sample_update_parameters(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation.from_gapic( @@ -1111,7 +1166,12 @@ def sample_delete_instance(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation.from_gapic( @@ -1243,7 +1303,12 @@ def sample_apply_parameters(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. 
response = operation.from_gapic( @@ -1272,7 +1337,9 @@ def __exit__(self, type, value, traceback): try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-memcache",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-memcache", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py index 1be13f1..3549659 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py @@ -31,7 +31,9 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-memcache",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-memcache", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() @@ -121,34 +123,48 @@ def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { self.list_instances: gapic_v1.method.wrap_method( - self.list_instances, default_timeout=1200.0, client_info=client_info, + self.list_instances, + default_timeout=1200.0, + client_info=client_info, ), self.get_instance: gapic_v1.method.wrap_method( - self.get_instance, default_timeout=1200.0, client_info=client_info, + self.get_instance, + default_timeout=1200.0, + client_info=client_info, ), self.create_instance: gapic_v1.method.wrap_method( - self.create_instance, default_timeout=1200.0, client_info=client_info, + self.create_instance, + default_timeout=1200.0, + client_info=client_info, ), self.update_instance: gapic_v1.method.wrap_method( - self.update_instance, default_timeout=1200.0, client_info=client_info, + self.update_instance, + default_timeout=1200.0, + client_info=client_info, ), self.update_parameters: gapic_v1.method.wrap_method( - self.update_parameters, default_timeout=1200.0, client_info=client_info, + self.update_parameters, + default_timeout=1200.0, + client_info=client_info, ), self.delete_instance: gapic_v1.method.wrap_method( - self.delete_instance, default_timeout=1200.0, client_info=client_info, + self.delete_instance, + default_timeout=1200.0, + client_info=client_info, ), self.apply_parameters: gapic_v1.method.wrap_method( - self.apply_parameters, default_timeout=1200.0, client_info=client_info, + self.apply_parameters, + default_timeout=1200.0, + client_info=client_info, ), } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
""" raise NotImplementedError() diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py index 52a92dc..a470b86 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py @@ -245,8 +245,7 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property diff --git a/google/cloud/memcache_v1/types/cloud_memcache.py b/google/cloud/memcache_v1/types/cloud_memcache.py index a66f713..c002c1b 100644 --- a/google/cloud/memcache_v1/types/cloud_memcache.py +++ b/google/cloud/memcache_v1/types/cloud_memcache.py @@ -138,8 +138,14 @@ class NodeConfig(proto.Message): Memcached node. """ - cpu_count = proto.Field(proto.INT32, number=1,) - memory_size_mb = proto.Field(proto.INT32, number=2,) + cpu_count = proto.Field( + proto.INT32, + number=1, + ) + memory_size_mb = proto.Field( + proto.INT32, + number=2, + ) class Node(proto.Message): r""" @@ -175,12 +181,32 @@ class State(proto.Enum): DELETING = 3 UPDATING = 4 - node_id = proto.Field(proto.STRING, number=1,) - zone = proto.Field(proto.STRING, number=2,) - state = proto.Field(proto.ENUM, number=3, enum="Instance.Node.State",) - host = proto.Field(proto.STRING, number=4,) - port = proto.Field(proto.INT32, number=5,) - parameters = proto.Field(proto.MESSAGE, number=6, message="MemcacheParameters",) + node_id = proto.Field( + proto.STRING, + number=1, + ) + zone = proto.Field( + proto.STRING, + number=2, + ) + state = proto.Field( + proto.ENUM, + number=3, + enum="Instance.Node.State", + ) + host = proto.Field( + proto.STRING, + number=4, + ) + port = proto.Field( + proto.INT32, + number=5, + ) + parameters = proto.Field( + proto.MESSAGE, + number=6, + 
message="MemcacheParameters", + ) class InstanceMessage(proto.Message): r""" @@ -199,31 +225,89 @@ class Code(proto.Enum): CODE_UNSPECIFIED = 0 ZONE_DISTRIBUTION_UNBALANCED = 1 - code = proto.Field(proto.ENUM, number=1, enum="Instance.InstanceMessage.Code",) - message = proto.Field(proto.STRING, number=2,) - - name = proto.Field(proto.STRING, number=1,) - display_name = proto.Field(proto.STRING, number=2,) - labels = proto.MapField(proto.STRING, proto.STRING, number=3,) - authorized_network = proto.Field(proto.STRING, number=4,) - zones = proto.RepeatedField(proto.STRING, number=5,) - node_count = proto.Field(proto.INT32, number=6,) - node_config = proto.Field(proto.MESSAGE, number=7, message=NodeConfig,) - memcache_version = proto.Field(proto.ENUM, number=9, enum="MemcacheVersion",) - parameters = proto.Field(proto.MESSAGE, number=11, message="MemcacheParameters",) - memcache_nodes = proto.RepeatedField(proto.MESSAGE, number=12, message=Node,) + code = proto.Field( + proto.ENUM, + number=1, + enum="Instance.InstanceMessage.Code", + ) + message = proto.Field( + proto.STRING, + number=2, + ) + + name = proto.Field( + proto.STRING, + number=1, + ) + display_name = proto.Field( + proto.STRING, + number=2, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + authorized_network = proto.Field( + proto.STRING, + number=4, + ) + zones = proto.RepeatedField( + proto.STRING, + number=5, + ) + node_count = proto.Field( + proto.INT32, + number=6, + ) + node_config = proto.Field( + proto.MESSAGE, + number=7, + message=NodeConfig, + ) + memcache_version = proto.Field( + proto.ENUM, + number=9, + enum="MemcacheVersion", + ) + parameters = proto.Field( + proto.MESSAGE, + number=11, + message="MemcacheParameters", + ) + memcache_nodes = proto.RepeatedField( + proto.MESSAGE, + number=12, + message=Node, + ) create_time = proto.Field( - proto.MESSAGE, number=13, message=timestamp_pb2.Timestamp, + proto.MESSAGE, + number=13, + 
message=timestamp_pb2.Timestamp, ) update_time = proto.Field( - proto.MESSAGE, number=14, message=timestamp_pb2.Timestamp, + proto.MESSAGE, + number=14, + message=timestamp_pb2.Timestamp, + ) + state = proto.Field( + proto.ENUM, + number=15, + enum=State, + ) + memcache_full_version = proto.Field( + proto.STRING, + number=18, ) - state = proto.Field(proto.ENUM, number=15, enum=State,) - memcache_full_version = proto.Field(proto.STRING, number=18,) instance_messages = proto.RepeatedField( - proto.MESSAGE, number=19, message=InstanceMessage, + proto.MESSAGE, + number=19, + message=InstanceMessage, + ) + discovery_endpoint = proto.Field( + proto.STRING, + number=20, ) - discovery_endpoint = proto.Field(proto.STRING, number=20,) class ListInstancesRequest(proto.Message): @@ -256,11 +340,26 @@ class ListInstancesRequest(proto.Message): "name desc" or "" (unsorted). """ - parent = proto.Field(proto.STRING, number=1,) - page_size = proto.Field(proto.INT32, number=2,) - page_token = proto.Field(proto.STRING, number=3,) - filter = proto.Field(proto.STRING, number=4,) - order_by = proto.Field(proto.STRING, number=5,) + parent = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) + filter = proto.Field( + proto.STRING, + number=4, + ) + order_by = proto.Field( + proto.STRING, + number=5, + ) class ListInstancesResponse(proto.Message): @@ -287,9 +386,19 @@ class ListInstancesResponse(proto.Message): def raw_page(self): return self - instances = proto.RepeatedField(proto.MESSAGE, number=1, message="Instance",) - next_page_token = proto.Field(proto.STRING, number=2,) - unreachable = proto.RepeatedField(proto.STRING, number=3,) + instances = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Instance", + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + unreachable = proto.RepeatedField( + proto.STRING, + number=3, + ) class 
GetInstanceRequest(proto.Message): @@ -303,7 +412,10 @@ class GetInstanceRequest(proto.Message): where ``location_id`` refers to a GCP region """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class CreateInstanceRequest(proto.Message): @@ -332,9 +444,19 @@ class CreateInstanceRequest(proto.Message): Required. A Memcached Instance """ - parent = proto.Field(proto.STRING, number=1,) - instance_id = proto.Field(proto.STRING, number=2,) - instance = proto.Field(proto.MESSAGE, number=3, message="Instance",) + parent = proto.Field( + proto.STRING, + number=1, + ) + instance_id = proto.Field( + proto.STRING, + number=2, + ) + instance = proto.Field( + proto.MESSAGE, + number=3, + message="Instance", + ) class UpdateInstanceRequest(proto.Message): @@ -352,9 +474,15 @@ class UpdateInstanceRequest(proto.Message): """ update_mask = proto.Field( - proto.MESSAGE, number=1, message=field_mask_pb2.FieldMask, + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + instance = proto.Field( + proto.MESSAGE, + number=2, + message="Instance", ) - instance = proto.Field(proto.MESSAGE, number=2, message="Instance",) class DeleteInstanceRequest(proto.Message): @@ -368,7 +496,10 @@ class DeleteInstanceRequest(proto.Message): where ``location_id`` refers to a GCP region """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class ApplyParametersRequest(proto.Message): @@ -391,9 +522,18 @@ class ApplyParametersRequest(proto.Message): nodes within the instance. 
""" - name = proto.Field(proto.STRING, number=1,) - node_ids = proto.RepeatedField(proto.STRING, number=2,) - apply_all = proto.Field(proto.BOOL, number=3,) + name = proto.Field( + proto.STRING, + number=1, + ) + node_ids = proto.RepeatedField( + proto.STRING, + number=2, + ) + apply_all = proto.Field( + proto.BOOL, + number=3, + ) class UpdateParametersRequest(proto.Message): @@ -411,11 +551,20 @@ class UpdateParametersRequest(proto.Message): The parameters to apply to the instance. """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) update_mask = proto.Field( - proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + parameters = proto.Field( + proto.MESSAGE, + number=3, + message="MemcacheParameters", ) - parameters = proto.Field(proto.MESSAGE, number=3, message="MemcacheParameters",) class MemcacheParameters(proto.Message): @@ -434,8 +583,15 @@ class MemcacheParameters(proto.Message): memcached process. """ - id = proto.Field(proto.STRING, number=1,) - params = proto.MapField(proto.STRING, proto.STRING, number=3,) + id = proto.Field( + proto.STRING, + number=1, + ) + params = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) class OperationMetadata(proto.Message): @@ -468,13 +624,36 @@ class OperationMetadata(proto.Message): operation. 
""" - create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) - target = proto.Field(proto.STRING, number=3,) - verb = proto.Field(proto.STRING, number=4,) - status_detail = proto.Field(proto.STRING, number=5,) - cancel_requested = proto.Field(proto.BOOL, number=6,) - api_version = proto.Field(proto.STRING, number=7,) + create_time = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target = proto.Field( + proto.STRING, + number=3, + ) + verb = proto.Field( + proto.STRING, + number=4, + ) + status_detail = proto.Field( + proto.STRING, + number=5, + ) + cancel_requested = proto.Field( + proto.BOOL, + number=6, + ) + api_version = proto.Field( + proto.STRING, + number=7, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py index b8c094f..b6d968a 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py @@ -317,12 +317,20 @@ def sample_list_instances(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListInstancesAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -413,7 +421,12 @@ def sample_get_instance(): ) # Send the request. 
- response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -548,7 +561,12 @@ def sample_create_instance(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -674,7 +692,12 @@ def sample_update_instance(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -802,7 +825,12 @@ def sample_update_parameters(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -919,7 +947,12 @@ def sample_delete_instance(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -1050,7 +1083,12 @@ def sample_apply_parameters(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -1183,7 +1221,12 @@ def sample_apply_software_update(): ) # Send the request. 
- response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -1205,7 +1248,9 @@ async def __aexit__(self, exc_type, exc, tb): try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-memcache",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-memcache", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index c00cab0..f245684 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -58,7 +58,10 @@ class CloudMemcacheClientMeta(type): _transport_registry["grpc"] = CloudMemcacheGrpcTransport _transport_registry["grpc_asyncio"] = CloudMemcacheGrpcAsyncIOTransport - def get_transport_class(cls, label: str = None,) -> Type[CloudMemcacheTransport]: + def get_transport_class( + cls, + label: str = None, + ) -> Type[CloudMemcacheTransport]: """Returns an appropriate transport class. 
Args: @@ -182,10 +185,16 @@ def transport(self) -> CloudMemcacheTransport: return self._transport @staticmethod - def instance_path(project: str, location: str, instance: str,) -> str: + def instance_path( + project: str, + location: str, + instance: str, + ) -> str: """Returns a fully-qualified instance string.""" return "projects/{project}/locations/{location}/instances/{instance}".format( - project=project, location=location, instance=instance, + project=project, + location=location, + instance=instance, ) @staticmethod @@ -198,7 +207,9 @@ def parse_instance_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str,) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -211,9 +222,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str,) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: @@ -222,9 +237,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str,) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -233,9 +252,13 @@ def parse_common_organization_path(path: str) 
-> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str,) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project,) + return "projects/{project}".format( + project=project, + ) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: @@ -244,10 +267,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str,) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) @staticmethod @@ -512,12 +539,20 @@ def sample_list_instances(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListInstancesPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -608,7 +643,12 @@ def sample_get_instance(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -743,7 +783,12 @@ def sample_create_instance(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. 
response = operation.from_gapic( @@ -869,7 +914,12 @@ def sample_update_instance(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation.from_gapic( @@ -997,7 +1047,12 @@ def sample_update_parameters(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation.from_gapic( @@ -1114,7 +1169,12 @@ def sample_delete_instance(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation.from_gapic( @@ -1245,7 +1305,12 @@ def sample_apply_parameters(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation.from_gapic( @@ -1378,7 +1443,12 @@ def sample_apply_software_update(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. 
response = operation.from_gapic( @@ -1407,7 +1477,9 @@ def __exit__(self, type, value, traceback): try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-memcache",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-memcache", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py index ef164f4..479ea37 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py @@ -31,7 +31,9 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-memcache",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-memcache", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() @@ -121,25 +123,39 @@ def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { self.list_instances: gapic_v1.method.wrap_method( - self.list_instances, default_timeout=1200.0, client_info=client_info, + self.list_instances, + default_timeout=1200.0, + client_info=client_info, ), self.get_instance: gapic_v1.method.wrap_method( - self.get_instance, default_timeout=1200.0, client_info=client_info, + self.get_instance, + default_timeout=1200.0, + client_info=client_info, ), self.create_instance: gapic_v1.method.wrap_method( - self.create_instance, default_timeout=1200.0, client_info=client_info, + self.create_instance, + default_timeout=1200.0, + client_info=client_info, ), self.update_instance: gapic_v1.method.wrap_method( - self.update_instance, default_timeout=1200.0, client_info=client_info, + self.update_instance, + default_timeout=1200.0, + client_info=client_info, ), self.update_parameters: gapic_v1.method.wrap_method( - self.update_parameters, default_timeout=1200.0, client_info=client_info, + self.update_parameters, + default_timeout=1200.0, + client_info=client_info, ), self.delete_instance: gapic_v1.method.wrap_method( - self.delete_instance, default_timeout=1200.0, client_info=client_info, + self.delete_instance, + default_timeout=1200.0, + client_info=client_info, ), self.apply_parameters: gapic_v1.method.wrap_method( - self.apply_parameters, default_timeout=1200.0, client_info=client_info, + self.apply_parameters, + default_timeout=1200.0, + client_info=client_info, ), self.apply_software_update: gapic_v1.method.wrap_method( self.apply_software_update, @@ -151,9 +167,9 @@ def _prep_wrapped_messages(self, client_info): def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
""" raise NotImplementedError() diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py index 6a163ad..6dffaf3 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py @@ -245,8 +245,7 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property diff --git a/google/cloud/memcache_v1beta2/types/cloud_memcache.py b/google/cloud/memcache_v1beta2/types/cloud_memcache.py index dbdf720..413d6cf 100644 --- a/google/cloud/memcache_v1beta2/types/cloud_memcache.py +++ b/google/cloud/memcache_v1beta2/types/cloud_memcache.py @@ -147,8 +147,14 @@ class NodeConfig(proto.Message): Memcached node. """ - cpu_count = proto.Field(proto.INT32, number=1,) - memory_size_mb = proto.Field(proto.INT32, number=2,) + cpu_count = proto.Field( + proto.INT32, + number=1, + ) + memory_size_mb = proto.Field( + proto.INT32, + number=2, + ) class Node(proto.Message): r""" @@ -187,13 +193,36 @@ class State(proto.Enum): DELETING = 3 UPDATING = 4 - node_id = proto.Field(proto.STRING, number=1,) - zone = proto.Field(proto.STRING, number=2,) - state = proto.Field(proto.ENUM, number=3, enum="Instance.Node.State",) - host = proto.Field(proto.STRING, number=4,) - port = proto.Field(proto.INT32, number=5,) - parameters = proto.Field(proto.MESSAGE, number=6, message="MemcacheParameters",) - update_available = proto.Field(proto.BOOL, number=7,) + node_id = proto.Field( + proto.STRING, + number=1, + ) + zone = proto.Field( + proto.STRING, + number=2, + ) + state = proto.Field( + proto.ENUM, + number=3, + enum="Instance.Node.State", + ) + host = proto.Field( + proto.STRING, + number=4, + ) + port = proto.Field( + proto.INT32, 
+ number=5, + ) + parameters = proto.Field( + proto.MESSAGE, + number=6, + message="MemcacheParameters", + ) + update_available = proto.Field( + proto.BOOL, + number=7, + ) class InstanceMessage(proto.Message): r""" @@ -212,32 +241,93 @@ class Code(proto.Enum): CODE_UNSPECIFIED = 0 ZONE_DISTRIBUTION_UNBALANCED = 1 - code = proto.Field(proto.ENUM, number=1, enum="Instance.InstanceMessage.Code",) - message = proto.Field(proto.STRING, number=2,) - - name = proto.Field(proto.STRING, number=1,) - display_name = proto.Field(proto.STRING, number=2,) - labels = proto.MapField(proto.STRING, proto.STRING, number=3,) - authorized_network = proto.Field(proto.STRING, number=4,) - zones = proto.RepeatedField(proto.STRING, number=5,) - node_count = proto.Field(proto.INT32, number=6,) - node_config = proto.Field(proto.MESSAGE, number=7, message=NodeConfig,) - memcache_version = proto.Field(proto.ENUM, number=9, enum="MemcacheVersion",) - parameters = proto.Field(proto.MESSAGE, number=11, message="MemcacheParameters",) - memcache_nodes = proto.RepeatedField(proto.MESSAGE, number=12, message=Node,) + code = proto.Field( + proto.ENUM, + number=1, + enum="Instance.InstanceMessage.Code", + ) + message = proto.Field( + proto.STRING, + number=2, + ) + + name = proto.Field( + proto.STRING, + number=1, + ) + display_name = proto.Field( + proto.STRING, + number=2, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + authorized_network = proto.Field( + proto.STRING, + number=4, + ) + zones = proto.RepeatedField( + proto.STRING, + number=5, + ) + node_count = proto.Field( + proto.INT32, + number=6, + ) + node_config = proto.Field( + proto.MESSAGE, + number=7, + message=NodeConfig, + ) + memcache_version = proto.Field( + proto.ENUM, + number=9, + enum="MemcacheVersion", + ) + parameters = proto.Field( + proto.MESSAGE, + number=11, + message="MemcacheParameters", + ) + memcache_nodes = proto.RepeatedField( + proto.MESSAGE, + number=12, + message=Node, + ) create_time 
= proto.Field( - proto.MESSAGE, number=13, message=timestamp_pb2.Timestamp, + proto.MESSAGE, + number=13, + message=timestamp_pb2.Timestamp, ) update_time = proto.Field( - proto.MESSAGE, number=14, message=timestamp_pb2.Timestamp, + proto.MESSAGE, + number=14, + message=timestamp_pb2.Timestamp, + ) + state = proto.Field( + proto.ENUM, + number=15, + enum=State, + ) + memcache_full_version = proto.Field( + proto.STRING, + number=18, ) - state = proto.Field(proto.ENUM, number=15, enum=State,) - memcache_full_version = proto.Field(proto.STRING, number=18,) instance_messages = proto.RepeatedField( - proto.MESSAGE, number=19, message=InstanceMessage, + proto.MESSAGE, + number=19, + message=InstanceMessage, + ) + discovery_endpoint = proto.Field( + proto.STRING, + number=20, + ) + update_available = proto.Field( + proto.BOOL, + number=21, ) - discovery_endpoint = proto.Field(proto.STRING, number=20,) - update_available = proto.Field(proto.BOOL, number=21,) class ListInstancesRequest(proto.Message): @@ -270,11 +360,26 @@ class ListInstancesRequest(proto.Message): "name desc" or "" (unsorted). 
""" - parent = proto.Field(proto.STRING, number=1,) - page_size = proto.Field(proto.INT32, number=2,) - page_token = proto.Field(proto.STRING, number=3,) - filter = proto.Field(proto.STRING, number=4,) - order_by = proto.Field(proto.STRING, number=5,) + parent = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) + filter = proto.Field( + proto.STRING, + number=4, + ) + order_by = proto.Field( + proto.STRING, + number=5, + ) class ListInstancesResponse(proto.Message): @@ -301,9 +406,19 @@ class ListInstancesResponse(proto.Message): def raw_page(self): return self - resources = proto.RepeatedField(proto.MESSAGE, number=1, message="Instance",) - next_page_token = proto.Field(proto.STRING, number=2,) - unreachable = proto.RepeatedField(proto.STRING, number=3,) + resources = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Instance", + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + unreachable = proto.RepeatedField( + proto.STRING, + number=3, + ) class GetInstanceRequest(proto.Message): @@ -317,7 +432,10 @@ class GetInstanceRequest(proto.Message): where ``location_id`` refers to a GCP region """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class CreateInstanceRequest(proto.Message): @@ -346,9 +464,19 @@ class CreateInstanceRequest(proto.Message): Required. 
A Memcached [Instance] resource """ - parent = proto.Field(proto.STRING, number=1,) - instance_id = proto.Field(proto.STRING, number=2,) - resource = proto.Field(proto.MESSAGE, number=3, message="Instance",) + parent = proto.Field( + proto.STRING, + number=1, + ) + instance_id = proto.Field( + proto.STRING, + number=2, + ) + resource = proto.Field( + proto.MESSAGE, + number=3, + message="Instance", + ) class UpdateInstanceRequest(proto.Message): @@ -366,9 +494,15 @@ class UpdateInstanceRequest(proto.Message): """ update_mask = proto.Field( - proto.MESSAGE, number=1, message=field_mask_pb2.FieldMask, + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + resource = proto.Field( + proto.MESSAGE, + number=2, + message="Instance", ) - resource = proto.Field(proto.MESSAGE, number=2, message="Instance",) class DeleteInstanceRequest(proto.Message): @@ -382,7 +516,10 @@ class DeleteInstanceRequest(proto.Message): where ``location_id`` refers to a GCP region """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class ApplyParametersRequest(proto.Message): @@ -404,9 +541,18 @@ class ApplyParametersRequest(proto.Message): within the instance. """ - name = proto.Field(proto.STRING, number=1,) - node_ids = proto.RepeatedField(proto.STRING, number=2,) - apply_all = proto.Field(proto.BOOL, number=3,) + name = proto.Field( + proto.STRING, + number=1, + ) + node_ids = proto.RepeatedField( + proto.STRING, + number=2, + ) + apply_all = proto.Field( + proto.BOOL, + number=3, + ) class UpdateParametersRequest(proto.Message): @@ -424,11 +570,20 @@ class UpdateParametersRequest(proto.Message): The parameters to apply to the instance. 
""" - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) update_mask = proto.Field( - proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + parameters = proto.Field( + proto.MESSAGE, + number=3, + message="MemcacheParameters", ) - parameters = proto.Field(proto.MESSAGE, number=3, message="MemcacheParameters",) class ApplySoftwareUpdateRequest(proto.Message): @@ -452,9 +607,18 @@ class ApplySoftwareUpdateRequest(proto.Message): instance. """ - instance = proto.Field(proto.STRING, number=1,) - node_ids = proto.RepeatedField(proto.STRING, number=2,) - apply_all = proto.Field(proto.BOOL, number=3,) + instance = proto.Field( + proto.STRING, + number=1, + ) + node_ids = proto.RepeatedField( + proto.STRING, + number=2, + ) + apply_all = proto.Field( + proto.BOOL, + number=3, + ) class MemcacheParameters(proto.Message): @@ -472,8 +636,15 @@ class MemcacheParameters(proto.Message): memcached process. """ - id = proto.Field(proto.STRING, number=1,) - params = proto.MapField(proto.STRING, proto.STRING, number=3,) + id = proto.Field( + proto.STRING, + number=1, + ) + params = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) class OperationMetadata(proto.Message): @@ -506,13 +677,36 @@ class OperationMetadata(proto.Message): operation. 
""" - create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) - target = proto.Field(proto.STRING, number=3,) - verb = proto.Field(proto.STRING, number=4,) - status_detail = proto.Field(proto.STRING, number=5,) - cancel_requested = proto.Field(proto.BOOL, number=6,) - api_version = proto.Field(proto.STRING, number=7,) + create_time = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target = proto.Field( + proto.STRING, + number=3, + ) + verb = proto.Field( + proto.STRING, + number=4, + ) + status_detail = proto.Field( + proto.STRING, + number=5, + ) + cancel_requested = proto.Field( + proto.BOOL, + number=6, + ) + api_version = proto.Field( + proto.STRING, + number=7, + ) class LocationMetadata(proto.Message): @@ -528,13 +722,15 @@ class LocationMetadata(proto.Message): """ available_zones = proto.MapField( - proto.STRING, proto.MESSAGE, number=1, message="ZoneMetadata", + proto.STRING, + proto.MESSAGE, + number=1, + message="ZoneMetadata", ) class ZoneMetadata(proto.Message): - r""" - """ + r""" """ __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/noxfile.py b/noxfile.py index 2a2001c..3addb4e 100644 --- a/noxfile.py +++ b/noxfile.py @@ -24,7 +24,7 @@ import nox -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" @@ -57,7 +57,9 @@ def lint(session): """ session.install("flake8", BLACK_VERSION) session.run( - "black", "--check", *BLACK_PATHS, + "black", + "--check", + *BLACK_PATHS, ) session.run("flake8", "google", "tests") @@ -67,7 +69,8 @@ def blacken(session): """Run black. 
Format code to uniform standard.""" session.install(BLACK_VERSION) session.run( - "black", *BLACK_PATHS, + "black", + *BLACK_PATHS, ) diff --git a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py index 774fd46..7ad6060 100644 --- a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -92,7 +92,11 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] + "client_class", + [ + CloudMemcacheClient, + CloudMemcacheAsyncClient, + ], ) def test_cloud_memcache_client_from_service_account_info(client_class): creds = ga_credentials.AnonymousCredentials() @@ -134,7 +138,11 @@ def test_cloud_memcache_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] + "client_class", + [ + CloudMemcacheClient, + CloudMemcacheAsyncClient, + ], ) def test_cloud_memcache_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() @@ -498,7 +506,9 @@ def test_cloud_memcache_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. 
- options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -638,10 +648,17 @@ def test_cloud_memcache_client_create_channel_credentials_file( ) -@pytest.mark.parametrize("request_type", [cloud_memcache.ListInstancesRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.ListInstancesRequest, + dict, + ], +) def test_list_instances(request_type, transport: str = "grpc"): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -652,7 +669,8 @@ def test_list_instances(request_type, transport: str = "grpc"): with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_memcache.ListInstancesResponse( - next_page_token="next_page_token_value", unreachable=["unreachable_value"], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) response = client.list_instances(request) @@ -671,7 +689,8 @@ def test_list_instances_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -687,7 +706,8 @@ async def test_list_instances_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.ListInstancesRequest ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -722,7 +742,9 @@ async def test_list_instances_async_from_dict(): def test_list_instances_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -742,7 +764,10 @@ def test_list_instances_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -771,11 +796,16 @@ async def test_list_instances_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_list_instances_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -783,7 +813,9 @@ def test_list_instances_flattened(): call.return_value = cloud_memcache.ListInstancesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_instances(parent="parent_value",) + client.list_instances( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -795,13 +827,16 @@ def test_list_instances_flattened(): def test_list_instances_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_instances( - cloud_memcache.ListInstancesRequest(), parent="parent_value", + cloud_memcache.ListInstancesRequest(), + parent="parent_value", ) @@ -821,7 +856,9 @@ async def test_list_instances_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_instances(parent="parent_value",) + response = await client.list_instances( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -842,13 +879,15 @@ async def test_list_instances_flattened_error_async(): # fields is an error. 
with pytest.raises(ValueError): await client.list_instances( - cloud_memcache.ListInstancesRequest(), parent="parent_value", + cloud_memcache.ListInstancesRequest(), + parent="parent_value", ) def test_list_instances_pager(transport_name: str = "grpc"): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -863,12 +902,21 @@ def test_list_instances_pager(transport_name: str = "grpc"): ], next_page_token="abc", ), - cloud_memcache.ListInstancesResponse(instances=[], next_page_token="def",), cloud_memcache.ListInstancesResponse( - instances=[cloud_memcache.Instance(),], next_page_token="ghi", + instances=[], + next_page_token="def", + ), + cloud_memcache.ListInstancesResponse( + instances=[ + cloud_memcache.Instance(), + ], + next_page_token="ghi", ), cloud_memcache.ListInstancesResponse( - instances=[cloud_memcache.Instance(), cloud_memcache.Instance(),], + instances=[ + cloud_memcache.Instance(), + cloud_memcache.Instance(), + ], ), RuntimeError, ) @@ -888,7 +936,8 @@ def test_list_instances_pager(transport_name: str = "grpc"): def test_list_instances_pages(transport_name: str = "grpc"): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -903,12 +952,21 @@ def test_list_instances_pages(transport_name: str = "grpc"): ], next_page_token="abc", ), - cloud_memcache.ListInstancesResponse(instances=[], next_page_token="def",), cloud_memcache.ListInstancesResponse( - instances=[cloud_memcache.Instance(),], next_page_token="ghi", + instances=[], + next_page_token="def", + ), + cloud_memcache.ListInstancesResponse( + instances=[ + cloud_memcache.Instance(), + ], + next_page_token="ghi", ), cloud_memcache.ListInstancesResponse( - instances=[cloud_memcache.Instance(), cloud_memcache.Instance(),], + instances=[ + cloud_memcache.Instance(), + cloud_memcache.Instance(), + ], ), RuntimeError, ) @@ -919,7 +977,9 @@ def test_list_instances_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_instances_async_pager(): - client = CloudMemcacheAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -935,16 +995,27 @@ async def test_list_instances_async_pager(): ], next_page_token="abc", ), - cloud_memcache.ListInstancesResponse(instances=[], next_page_token="def",), cloud_memcache.ListInstancesResponse( - instances=[cloud_memcache.Instance(),], next_page_token="ghi", + instances=[], + next_page_token="def", ), cloud_memcache.ListInstancesResponse( - instances=[cloud_memcache.Instance(), cloud_memcache.Instance(),], + instances=[ + cloud_memcache.Instance(), + ], + next_page_token="ghi", + ), + cloud_memcache.ListInstancesResponse( + instances=[ + cloud_memcache.Instance(), + cloud_memcache.Instance(), + ], ), RuntimeError, ) - async_pager = await client.list_instances(request={},) + async_pager = await client.list_instances( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -956,7 +1027,9 @@ async def test_list_instances_async_pager(): @pytest.mark.asyncio async def test_list_instances_async_pages(): - client = CloudMemcacheAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -972,12 +1045,21 @@ async def test_list_instances_async_pages(): ], next_page_token="abc", ), - cloud_memcache.ListInstancesResponse(instances=[], next_page_token="def",), cloud_memcache.ListInstancesResponse( - instances=[cloud_memcache.Instance(),], next_page_token="ghi", + instances=[], + next_page_token="def", + ), + cloud_memcache.ListInstancesResponse( + instances=[ + cloud_memcache.Instance(), + ], + next_page_token="ghi", ), cloud_memcache.ListInstancesResponse( - instances=[cloud_memcache.Instance(), cloud_memcache.Instance(),], + instances=[ + cloud_memcache.Instance(), + cloud_memcache.Instance(), + ], ), RuntimeError, ) @@ -988,10 +1070,17 @@ async def test_list_instances_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [cloud_memcache.GetInstanceRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.GetInstanceRequest, + dict, + ], +) def test_get_instance(request_type, transport: str = "grpc"): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1036,7 +1125,8 @@ def test_get_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1052,7 +1142,8 @@ async def test_get_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.GetInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1101,7 +1192,9 @@ async def test_get_instance_async_from_dict(): def test_get_instance_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1121,7 +1214,10 @@ def test_get_instance_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1150,11 +1246,16 @@ async def test_get_instance_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_get_instance_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_instance), "__call__") as call: @@ -1162,7 +1263,9 @@ def test_get_instance_flattened(): call.return_value = cloud_memcache.Instance() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_instance(name="name_value",) + client.get_instance( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1174,13 +1277,16 @@ def test_get_instance_flattened(): def test_get_instance_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_instance( - cloud_memcache.GetInstanceRequest(), name="name_value", + cloud_memcache.GetInstanceRequest(), + name="name_value", ) @@ -1200,7 +1306,9 @@ async def test_get_instance_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_instance(name="name_value",) + response = await client.get_instance( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1221,14 +1329,22 @@ async def test_get_instance_flattened_error_async(): # fields is an error. 
with pytest.raises(ValueError): await client.get_instance( - cloud_memcache.GetInstanceRequest(), name="name_value", + cloud_memcache.GetInstanceRequest(), + name="name_value", ) -@pytest.mark.parametrize("request_type", [cloud_memcache.CreateInstanceRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.CreateInstanceRequest, + dict, + ], +) def test_create_instance(request_type, transport: str = "grpc"): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1254,7 +1370,8 @@ def test_create_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1270,7 +1387,8 @@ async def test_create_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.CreateInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1300,7 +1418,9 @@ async def test_create_instance_async_from_dict(): def test_create_instance_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
@@ -1320,7 +1440,10 @@ def test_create_instance_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1349,11 +1472,16 @@ async def test_create_instance_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_create_instance_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_instance), "__call__") as call: @@ -1383,7 +1511,9 @@ def test_create_instance_flattened(): def test_create_instance_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1450,10 +1580,17 @@ async def test_create_instance_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [cloud_memcache.UpdateInstanceRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.UpdateInstanceRequest, + dict, + ], +) def test_update_instance(request_type, transport: str = "grpc"): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1479,7 +1616,8 @@ def test_update_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1495,7 +1633,8 @@ async def test_update_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1525,7 +1664,9 @@ async def test_update_instance_async_from_dict(): def test_update_instance_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1545,9 +1686,10 @@ def test_update_instance_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "instance.name=instance.name/value",) in kw[ - "metadata" - ] + assert ( + "x-goog-request-params", + "instance.name=instance.name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1576,13 +1718,16 @@ async def test_update_instance_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "instance.name=instance.name/value",) in kw[ - "metadata" - ] + assert ( + "x-goog-request-params", + "instance.name=instance.name/value", + ) in kw["metadata"] def test_update_instance_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_instance), "__call__") as call: @@ -1608,7 +1753,9 @@ def test_update_instance_flattened(): def test_update_instance_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1670,11 +1817,16 @@ async def test_update_instance_flattened_error_async(): @pytest.mark.parametrize( - "request_type", [cloud_memcache.UpdateParametersRequest, dict,] + "request_type", + [ + cloud_memcache.UpdateParametersRequest, + dict, + ], ) def test_update_parameters(request_type, transport: str = "grpc"): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1702,7 +1854,8 @@ def test_update_parameters_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1720,7 +1873,8 @@ async def test_update_parameters_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateParametersRequest ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1752,7 +1906,9 @@ async def test_update_parameters_async_from_dict(): def test_update_parameters_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1774,7 +1930,10 @@ def test_update_parameters_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1805,11 +1964,16 @@ async def test_update_parameters_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_update_parameters_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1841,7 +2005,9 @@ def test_update_parameters_flattened(): def test_update_parameters_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1910,10 +2076,17 @@ async def test_update_parameters_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [cloud_memcache.DeleteInstanceRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.DeleteInstanceRequest, + dict, + ], +) def test_delete_instance(request_type, transport: str = "grpc"): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1939,7 +2112,8 @@ def test_delete_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1955,7 +2129,8 @@ async def test_delete_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.DeleteInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1985,7 +2160,9 @@ async def test_delete_instance_async_from_dict(): def test_delete_instance_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2005,7 +2182,10 @@ def test_delete_instance_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2034,11 +2214,16 @@ async def test_delete_instance_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_delete_instance_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: @@ -2046,7 +2231,9 @@ def test_delete_instance_flattened(): call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_instance(name="name_value",) + client.delete_instance( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2058,13 +2245,16 @@ def test_delete_instance_flattened(): def test_delete_instance_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_instance( - cloud_memcache.DeleteInstanceRequest(), name="name_value", + cloud_memcache.DeleteInstanceRequest(), + name="name_value", ) @@ -2084,7 +2274,9 @@ async def test_delete_instance_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_instance(name="name_value",) + response = await client.delete_instance( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2105,14 +2297,22 @@ async def test_delete_instance_flattened_error_async(): # fields is an error. 
with pytest.raises(ValueError): await client.delete_instance( - cloud_memcache.DeleteInstanceRequest(), name="name_value", + cloud_memcache.DeleteInstanceRequest(), + name="name_value", ) -@pytest.mark.parametrize("request_type", [cloud_memcache.ApplyParametersRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.ApplyParametersRequest, + dict, + ], +) def test_apply_parameters(request_type, transport: str = "grpc"): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2138,7 +2338,8 @@ def test_apply_parameters_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2154,7 +2355,8 @@ async def test_apply_parameters_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.ApplyParametersRequest ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2184,7 +2386,9 @@ async def test_apply_parameters_async_from_dict(): def test_apply_parameters_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
@@ -2204,7 +2408,10 @@ def test_apply_parameters_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2233,11 +2440,16 @@ async def test_apply_parameters_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_apply_parameters_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: @@ -2246,7 +2458,9 @@ def test_apply_parameters_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.apply_parameters( - name="name_value", node_ids=["node_ids_value"], apply_all=True, + name="name_value", + node_ids=["node_ids_value"], + apply_all=True, ) # Establish that the underlying call was made with the expected @@ -2265,7 +2479,9 @@ def test_apply_parameters_flattened(): def test_apply_parameters_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2295,7 +2511,9 @@ async def test_apply_parameters_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.apply_parameters( - name="name_value", node_ids=["node_ids_value"], apply_all=True, + name="name_value", + node_ids=["node_ids_value"], + apply_all=True, ) # Establish that the underlying call was made with the expected @@ -2337,7 +2555,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -2357,7 +2576,10 @@ def test_credentials_transport_error(): options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = CloudMemcacheClient(client_options=options, transport=transport,) + client = CloudMemcacheClient( + client_options=options, + transport=transport, + ) # It is an error to provide an api_key and a credential. options = mock.Mock() @@ -2373,7 +2595,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = CloudMemcacheClient( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -2418,8 +2641,13 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
- client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.CloudMemcacheGrpcTransport,) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CloudMemcacheGrpcTransport, + ) def test_cloud_memcache_base_transport_error(): @@ -2475,7 +2703,8 @@ def test_cloud_memcache_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.CloudMemcacheTransport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -2633,7 +2862,8 @@ def test_cloud_memcache_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.CloudMemcacheGrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2645,7 +2875,8 @@ def test_cloud_memcache_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.CloudMemcacheGrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2752,12 +2983,16 @@ def test_cloud_memcache_transport_channel_mtls_with_adc(transport_class): def test_cloud_memcache_grpc_lro_client(): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) transport = client.transport # Ensure that we have a api-core operations client. 
- assert isinstance(transport.operations_client, operations_v1.OperationsClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -2765,12 +3000,16 @@ def test_cloud_memcache_grpc_lro_client(): def test_cloud_memcache_grpc_lro_async_client(): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -2781,7 +3020,9 @@ def test_instance_path(): location = "clam" instance = "whelk" expected = "projects/{project}/locations/{location}/instances/{instance}".format( - project=project, location=location, instance=instance, + project=project, + location=location, + instance=instance, ) actual = CloudMemcacheClient.instance_path(project, location, instance) assert expected == actual @@ -2822,7 +3063,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "winkle" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = CloudMemcacheClient.common_folder_path(folder) assert expected == actual @@ -2840,7 +3083,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "scallop" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + 
organization=organization, + ) actual = CloudMemcacheClient.common_organization_path(organization) assert expected == actual @@ -2858,7 +3103,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "squid" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = CloudMemcacheClient.common_project_path(project) assert expected == actual @@ -2878,7 +3125,8 @@ def test_common_location_path(): project = "whelk" location = "octopus" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = CloudMemcacheClient.common_location_path(project, location) assert expected == actual @@ -2903,7 +3151,8 @@ def test_client_with_default_client_info(): transports.CloudMemcacheTransport, "_prep_wrapped_messages" ) as prep: client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2912,7 +3161,8 @@ def test_client_with_default_client_info(): ) as prep: transport_class = CloudMemcacheClient.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2920,7 +3170,8 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) with mock.patch.object( type(getattr(client.transport, "grpc_channel")), "close" diff --git a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py 
b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index 0adabce..c57d3b5 100644 --- a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -94,7 +94,11 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] + "client_class", + [ + CloudMemcacheClient, + CloudMemcacheAsyncClient, + ], ) def test_cloud_memcache_client_from_service_account_info(client_class): creds = ga_credentials.AnonymousCredentials() @@ -136,7 +140,11 @@ def test_cloud_memcache_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] + "client_class", + [ + CloudMemcacheClient, + CloudMemcacheAsyncClient, + ], ) def test_cloud_memcache_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() @@ -500,7 +508,9 @@ def test_cloud_memcache_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. 
- options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -640,10 +650,17 @@ def test_cloud_memcache_client_create_channel_credentials_file( ) -@pytest.mark.parametrize("request_type", [cloud_memcache.ListInstancesRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.ListInstancesRequest, + dict, + ], +) def test_list_instances(request_type, transport: str = "grpc"): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -654,7 +671,8 @@ def test_list_instances(request_type, transport: str = "grpc"): with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_memcache.ListInstancesResponse( - next_page_token="next_page_token_value", unreachable=["unreachable_value"], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) response = client.list_instances(request) @@ -673,7 +691,8 @@ def test_list_instances_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -689,7 +708,8 @@ async def test_list_instances_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.ListInstancesRequest ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -724,7 +744,9 @@ async def test_list_instances_async_from_dict(): def test_list_instances_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -744,7 +766,10 @@ def test_list_instances_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -773,11 +798,16 @@ async def test_list_instances_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_list_instances_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -785,7 +815,9 @@ def test_list_instances_flattened(): call.return_value = cloud_memcache.ListInstancesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_instances(parent="parent_value",) + client.list_instances( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -797,13 +829,16 @@ def test_list_instances_flattened(): def test_list_instances_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_instances( - cloud_memcache.ListInstancesRequest(), parent="parent_value", + cloud_memcache.ListInstancesRequest(), + parent="parent_value", ) @@ -823,7 +858,9 @@ async def test_list_instances_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_instances(parent="parent_value",) + response = await client.list_instances( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -844,13 +881,15 @@ async def test_list_instances_flattened_error_async(): # fields is an error. 
with pytest.raises(ValueError): await client.list_instances( - cloud_memcache.ListInstancesRequest(), parent="parent_value", + cloud_memcache.ListInstancesRequest(), + parent="parent_value", ) def test_list_instances_pager(transport_name: str = "grpc"): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -865,12 +904,21 @@ def test_list_instances_pager(transport_name: str = "grpc"): ], next_page_token="abc", ), - cloud_memcache.ListInstancesResponse(resources=[], next_page_token="def",), cloud_memcache.ListInstancesResponse( - resources=[cloud_memcache.Instance(),], next_page_token="ghi", + resources=[], + next_page_token="def", ), cloud_memcache.ListInstancesResponse( - resources=[cloud_memcache.Instance(), cloud_memcache.Instance(),], + resources=[ + cloud_memcache.Instance(), + ], + next_page_token="ghi", + ), + cloud_memcache.ListInstancesResponse( + resources=[ + cloud_memcache.Instance(), + cloud_memcache.Instance(), + ], ), RuntimeError, ) @@ -890,7 +938,8 @@ def test_list_instances_pager(transport_name: str = "grpc"): def test_list_instances_pages(transport_name: str = "grpc"): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -905,12 +954,21 @@ def test_list_instances_pages(transport_name: str = "grpc"): ], next_page_token="abc", ), - cloud_memcache.ListInstancesResponse(resources=[], next_page_token="def",), cloud_memcache.ListInstancesResponse( - resources=[cloud_memcache.Instance(),], next_page_token="ghi", + resources=[], + next_page_token="def", ), cloud_memcache.ListInstancesResponse( - resources=[cloud_memcache.Instance(), cloud_memcache.Instance(),], + resources=[ + cloud_memcache.Instance(), + ], + next_page_token="ghi", + ), + cloud_memcache.ListInstancesResponse( + resources=[ + cloud_memcache.Instance(), + cloud_memcache.Instance(), + ], ), RuntimeError, ) @@ -921,7 +979,9 @@ def test_list_instances_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_instances_async_pager(): - client = CloudMemcacheAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -937,16 +997,27 @@ async def test_list_instances_async_pager(): ], next_page_token="abc", ), - cloud_memcache.ListInstancesResponse(resources=[], next_page_token="def",), cloud_memcache.ListInstancesResponse( - resources=[cloud_memcache.Instance(),], next_page_token="ghi", + resources=[], + next_page_token="def", ), cloud_memcache.ListInstancesResponse( - resources=[cloud_memcache.Instance(), cloud_memcache.Instance(),], + resources=[ + cloud_memcache.Instance(), + ], + next_page_token="ghi", + ), + cloud_memcache.ListInstancesResponse( + resources=[ + cloud_memcache.Instance(), + cloud_memcache.Instance(), + ], ), RuntimeError, ) - async_pager = await client.list_instances(request={},) + async_pager = await client.list_instances( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -958,7 +1029,9 @@ async def test_list_instances_async_pager(): @pytest.mark.asyncio async def test_list_instances_async_pages(): - client = CloudMemcacheAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -974,12 +1047,21 @@ async def test_list_instances_async_pages(): ], next_page_token="abc", ), - cloud_memcache.ListInstancesResponse(resources=[], next_page_token="def",), cloud_memcache.ListInstancesResponse( - resources=[cloud_memcache.Instance(),], next_page_token="ghi", + resources=[], + next_page_token="def", + ), + cloud_memcache.ListInstancesResponse( + resources=[ + cloud_memcache.Instance(), + ], + next_page_token="ghi", ), cloud_memcache.ListInstancesResponse( - resources=[cloud_memcache.Instance(), cloud_memcache.Instance(),], + resources=[ + cloud_memcache.Instance(), + cloud_memcache.Instance(), + ], ), RuntimeError, ) @@ -990,10 +1072,17 @@ async def test_list_instances_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [cloud_memcache.GetInstanceRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.GetInstanceRequest, + dict, + ], +) def test_get_instance(request_type, transport: str = "grpc"): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1040,7 +1129,8 @@ def test_get_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1056,7 +1146,8 @@ async def test_get_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.GetInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1107,7 +1198,9 @@ async def test_get_instance_async_from_dict(): def test_get_instance_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1127,7 +1220,10 @@ def test_get_instance_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1156,11 +1252,16 @@ async def test_get_instance_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_get_instance_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_instance), "__call__") as call: @@ -1168,7 +1269,9 @@ def test_get_instance_flattened(): call.return_value = cloud_memcache.Instance() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_instance(name="name_value",) + client.get_instance( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1180,13 +1283,16 @@ def test_get_instance_flattened(): def test_get_instance_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_instance( - cloud_memcache.GetInstanceRequest(), name="name_value", + cloud_memcache.GetInstanceRequest(), + name="name_value", ) @@ -1206,7 +1312,9 @@ async def test_get_instance_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_instance(name="name_value",) + response = await client.get_instance( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1227,14 +1335,22 @@ async def test_get_instance_flattened_error_async(): # fields is an error. 
with pytest.raises(ValueError): await client.get_instance( - cloud_memcache.GetInstanceRequest(), name="name_value", + cloud_memcache.GetInstanceRequest(), + name="name_value", ) -@pytest.mark.parametrize("request_type", [cloud_memcache.CreateInstanceRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.CreateInstanceRequest, + dict, + ], +) def test_create_instance(request_type, transport: str = "grpc"): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1260,7 +1376,8 @@ def test_create_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1276,7 +1393,8 @@ async def test_create_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.CreateInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1306,7 +1424,9 @@ async def test_create_instance_async_from_dict(): def test_create_instance_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
@@ -1326,7 +1446,10 @@ def test_create_instance_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1355,11 +1478,16 @@ async def test_create_instance_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_create_instance_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_instance), "__call__") as call: @@ -1389,7 +1517,9 @@ def test_create_instance_flattened(): def test_create_instance_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1456,10 +1586,17 @@ async def test_create_instance_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [cloud_memcache.UpdateInstanceRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.UpdateInstanceRequest, + dict, + ], +) def test_update_instance(request_type, transport: str = "grpc"): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1485,7 +1622,8 @@ def test_update_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1501,7 +1639,8 @@ async def test_update_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1531,7 +1670,9 @@ async def test_update_instance_async_from_dict(): def test_update_instance_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1551,9 +1692,10 @@ def test_update_instance_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource.name=resource.name/value",) in kw[ - "metadata" - ] + assert ( + "x-goog-request-params", + "resource.name=resource.name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1582,13 +1724,16 @@ async def test_update_instance_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource.name=resource.name/value",) in kw[ - "metadata" - ] + assert ( + "x-goog-request-params", + "resource.name=resource.name/value", + ) in kw["metadata"] def test_update_instance_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_instance), "__call__") as call: @@ -1614,7 +1759,9 @@ def test_update_instance_flattened(): def test_update_instance_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1676,11 +1823,16 @@ async def test_update_instance_flattened_error_async(): @pytest.mark.parametrize( - "request_type", [cloud_memcache.UpdateParametersRequest, dict,] + "request_type", + [ + cloud_memcache.UpdateParametersRequest, + dict, + ], ) def test_update_parameters(request_type, transport: str = "grpc"): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1708,7 +1860,8 @@ def test_update_parameters_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1726,7 +1879,8 @@ async def test_update_parameters_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateParametersRequest ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1758,7 +1912,9 @@ async def test_update_parameters_async_from_dict(): def test_update_parameters_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1780,7 +1936,10 @@ def test_update_parameters_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1811,11 +1970,16 @@ async def test_update_parameters_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_update_parameters_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1847,7 +2011,9 @@ def test_update_parameters_flattened(): def test_update_parameters_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1916,10 +2082,17 @@ async def test_update_parameters_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [cloud_memcache.DeleteInstanceRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.DeleteInstanceRequest, + dict, + ], +) def test_delete_instance(request_type, transport: str = "grpc"): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1945,7 +2118,8 @@ def test_delete_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1961,7 +2135,8 @@ async def test_delete_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.DeleteInstanceRequest ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1991,7 +2166,9 @@ async def test_delete_instance_async_from_dict(): def test_delete_instance_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2011,7 +2188,10 @@ def test_delete_instance_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2040,11 +2220,16 @@ async def test_delete_instance_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_delete_instance_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: @@ -2052,7 +2237,9 @@ def test_delete_instance_flattened(): call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_instance(name="name_value",) + client.delete_instance( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2064,13 +2251,16 @@ def test_delete_instance_flattened(): def test_delete_instance_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_instance( - cloud_memcache.DeleteInstanceRequest(), name="name_value", + cloud_memcache.DeleteInstanceRequest(), + name="name_value", ) @@ -2090,7 +2280,9 @@ async def test_delete_instance_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_instance(name="name_value",) + response = await client.delete_instance( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2111,14 +2303,22 @@ async def test_delete_instance_flattened_error_async(): # fields is an error. 
with pytest.raises(ValueError): await client.delete_instance( - cloud_memcache.DeleteInstanceRequest(), name="name_value", + cloud_memcache.DeleteInstanceRequest(), + name="name_value", ) -@pytest.mark.parametrize("request_type", [cloud_memcache.ApplyParametersRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.ApplyParametersRequest, + dict, + ], +) def test_apply_parameters(request_type, transport: str = "grpc"): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2144,7 +2344,8 @@ def test_apply_parameters_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2160,7 +2361,8 @@ async def test_apply_parameters_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.ApplyParametersRequest ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2190,7 +2392,9 @@ async def test_apply_parameters_async_from_dict(): def test_apply_parameters_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
@@ -2210,7 +2414,10 @@ def test_apply_parameters_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2239,11 +2446,16 @@ async def test_apply_parameters_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_apply_parameters_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: @@ -2252,7 +2464,9 @@ def test_apply_parameters_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.apply_parameters( - name="name_value", node_ids=["node_ids_value"], apply_all=True, + name="name_value", + node_ids=["node_ids_value"], + apply_all=True, ) # Establish that the underlying call was made with the expected @@ -2271,7 +2485,9 @@ def test_apply_parameters_flattened(): def test_apply_parameters_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2301,7 +2517,9 @@ async def test_apply_parameters_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.apply_parameters( - name="name_value", node_ids=["node_ids_value"], apply_all=True, + name="name_value", + node_ids=["node_ids_value"], + apply_all=True, ) # Establish that the underlying call was made with the expected @@ -2337,11 +2555,16 @@ async def test_apply_parameters_flattened_error_async(): @pytest.mark.parametrize( - "request_type", [cloud_memcache.ApplySoftwareUpdateRequest, dict,] + "request_type", + [ + cloud_memcache.ApplySoftwareUpdateRequest, + dict, + ], ) def test_apply_software_update(request_type, transport: str = "grpc"): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2369,7 +2592,8 @@ def test_apply_software_update_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2388,7 +2612,8 @@ async def test_apply_software_update_async( request_type=cloud_memcache.ApplySoftwareUpdateRequest, ): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2420,7 +2645,9 @@ async def test_apply_software_update_async_from_dict(): def test_apply_software_update_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2442,7 +2669,10 @@ def test_apply_software_update_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "instance=instance/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "instance=instance/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2473,11 +2703,16 @@ async def test_apply_software_update_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "instance=instance/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "instance=instance/value", + ) in kw["metadata"] def test_apply_software_update_flattened(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2488,7 +2723,9 @@ def test_apply_software_update_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.apply_software_update( - instance="instance_value", node_ids=["node_ids_value"], apply_all=True, + instance="instance_value", + node_ids=["node_ids_value"], + apply_all=True, ) # Establish that the underlying call was made with the expected @@ -2507,7 +2744,9 @@ def test_apply_software_update_flattened(): def test_apply_software_update_flattened_error(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2539,7 +2778,9 @@ async def test_apply_software_update_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.apply_software_update( - instance="instance_value", node_ids=["node_ids_value"], apply_all=True, + instance="instance_value", + node_ids=["node_ids_value"], + apply_all=True, ) # Establish that the underlying call was made with the expected @@ -2581,7 +2822,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -2601,7 +2843,10 @@ def test_credentials_transport_error(): options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = CloudMemcacheClient(client_options=options, transport=transport,) + client = CloudMemcacheClient( + client_options=options, + transport=transport, + ) # It is an error to provide an api_key and a credential. 
options = mock.Mock() @@ -2617,7 +2862,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = CloudMemcacheClient( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -2662,8 +2908,13 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.CloudMemcacheGrpcTransport,) + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CloudMemcacheGrpcTransport, + ) def test_cloud_memcache_base_transport_error(): @@ -2720,7 +2971,8 @@ def test_cloud_memcache_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.CloudMemcacheTransport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -2878,7 +3130,8 @@ def test_cloud_memcache_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.CloudMemcacheGrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2890,7 +3143,8 @@ def test_cloud_memcache_grpc_asyncio_transport_channel(): # Check that channel is used if provided. 
transport = transports.CloudMemcacheGrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2997,12 +3251,16 @@ def test_cloud_memcache_transport_channel_mtls_with_adc(transport_class): def test_cloud_memcache_grpc_lro_client(): client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance(transport.operations_client, operations_v1.OperationsClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -3010,12 +3268,16 @@ def test_cloud_memcache_grpc_lro_client(): def test_cloud_memcache_grpc_lro_async_client(): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) # Ensure that subsequent calls to the property send the exact same object. 
assert transport.operations_client is transport.operations_client @@ -3026,7 +3288,9 @@ def test_instance_path(): location = "clam" instance = "whelk" expected = "projects/{project}/locations/{location}/instances/{instance}".format( - project=project, location=location, instance=instance, + project=project, + location=location, + instance=instance, ) actual = CloudMemcacheClient.instance_path(project, location, instance) assert expected == actual @@ -3067,7 +3331,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "winkle" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = CloudMemcacheClient.common_folder_path(folder) assert expected == actual @@ -3085,7 +3351,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "scallop" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = CloudMemcacheClient.common_organization_path(organization) assert expected == actual @@ -3103,7 +3371,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "squid" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = CloudMemcacheClient.common_project_path(project) assert expected == actual @@ -3123,7 +3393,8 @@ def test_common_location_path(): project = "whelk" location = "octopus" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = CloudMemcacheClient.common_location_path(project, location) assert expected == actual @@ -3148,7 +3419,8 @@ def test_client_with_default_client_info(): transports.CloudMemcacheTransport, "_prep_wrapped_messages" ) as prep: client = CloudMemcacheClient( - 
credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -3157,7 +3429,8 @@ def test_client_with_default_client_info(): ) as prep: transport_class = CloudMemcacheClient.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -3165,7 +3438,8 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) with mock.patch.object( type(getattr(client.transport, "grpc_channel")), "close" From 09add55cd4cb6c85b077b9657e333c2ec531ff02 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 30 Mar 2022 16:48:12 +0000 Subject: [PATCH 092/159] chore(python): add E231 to .flake8 ignore list (#158) Source-Link: https://github.com/googleapis/synthtool/commit/7ff4aad2ec5af0380e8bd6da1fa06eaadf24ec81 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:462782b0b492346b2d9099aaff52206dd30bc8e031ea97082e6facecc2373244 --- .flake8 | 2 +- .github/.OwlBot.lock.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.flake8 b/.flake8 index 29227d4..2e43874 100644 --- a/.flake8 +++ b/.flake8 @@ -16,7 +16,7 @@ # Generated by synthtool. DO NOT EDIT! [flake8] -ignore = E203, E266, E501, W503 +ignore = E203, E231, E266, E501, W503 exclude = # Exclude generated code. 
**/proto/** diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 87dd006..9e0a935 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:7cffbc10910c3ab1b852c05114a08d374c195a81cdec1d4a67a1d129331d0bfe + digest: sha256:462782b0b492346b2d9099aaff52206dd30bc8e031ea97082e6facecc2373244 From d0025da622bffcba49dde31c34d4a846ff85804d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 31 Mar 2022 20:13:17 -0400 Subject: [PATCH 093/159] chore(python): update .pre-commit-config.yaml to use black==22.3.0 (#159) Source-Link: https://github.com/googleapis/synthtool/commit/7804ade3daae0d66649bee8df6c55484c6580b8d Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:eede5672562a32821444a8e803fb984a6f61f2237ea3de229d2de24453f4ae7d Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 3 ++- .pre-commit-config.yaml | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 9e0a935..22cc254 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:462782b0b492346b2d9099aaff52206dd30bc8e031ea97082e6facecc2373244 + digest: sha256:eede5672562a32821444a8e803fb984a6f61f2237ea3de229d2de24453f4ae7d +# created: 2022-03-30T23:44:26.560599165Z diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 62eb5a7..46d2371 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -22,7 +22,7 @@ repos: - id: end-of-file-fixer - id: check-yaml - repo: https://github.com/psf/black - rev: 19.10b0 + rev: 22.3.0 hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 From e7aca83c75e83b26436eddbe3ad564b288e8276e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 31 Mar 2022 22:14:49 -0400 Subject: [PATCH 094/159] chore(python): Enable size-label bot (#160) Source-Link: https://github.com/googleapis/synthtool/commit/06e82790dd719a165ad32b8a06f8f6ec3e3cae0f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:b3500c053313dc34e07b1632ba9e4e589f4f77036a7cf39e1fe8906811ae0fce Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- .github/auto-label.yaml | 2 ++ 2 files changed, 4 insertions(+), 2 deletions(-) create mode 100644 .github/auto-label.yaml diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 22cc254..58a0b15 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:eede5672562a32821444a8e803fb984a6f61f2237ea3de229d2de24453f4ae7d -# created: 2022-03-30T23:44:26.560599165Z + digest: sha256:b3500c053313dc34e07b1632ba9e4e589f4f77036a7cf39e1fe8906811ae0fce +# created: 2022-04-01T01:42:03.609279246Z diff --git a/.github/auto-label.yaml b/.github/auto-label.yaml new file mode 100644 index 0000000..09c8d73 --- /dev/null +++ b/.github/auto-label.yaml @@ -0,0 +1,2 @@ +requestsize: + enabled: true From 883a37168a9c464969aba1996f21de59545955fb Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 1 Apr 2022 19:42:22 +0000 Subject: [PATCH 095/159] chore(python): refactor unit / system test dependency install (#162) Source-Link: https://github.com/googleapis/synthtool/commit/993985f0fc4b37152e588f0549bcbdaf34666023 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:1894490910e891a385484514b22eb5133578897eb5b3c380e6d8ad475c6647cd --- .github/.OwlBot.lock.yaml | 4 +- noxfile.py | 105 ++++++++++++++++++++++++++++++-------- 2 files changed, 87 insertions(+), 22 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 58a0b15..fa57622 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:b3500c053313dc34e07b1632ba9e4e589f4f77036a7cf39e1fe8906811ae0fce -# created: 2022-04-01T01:42:03.609279246Z + digest: sha256:1894490910e891a385484514b22eb5133578897eb5b3c380e6d8ad475c6647cd +# created: 2022-04-01T15:48:07.524222836Z diff --git a/noxfile.py b/noxfile.py index 3addb4e..6ee5e8a 100644 --- a/noxfile.py +++ b/noxfile.py @@ -20,16 +20,40 @@ import os import pathlib import shutil +import warnings import nox - BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" -SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] + UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -81,23 +105,41 @@ def lint_setup_py(session): session.run("python", "setup.py", "check", "--restructuredtext", "--strict") +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + def default(session): # Install all test dependencies, then install this package in-place. constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) - session.install( - "mock", - "asyncmock", - "pytest", - "pytest-cov", - "pytest-asyncio", - "-c", - constraints_path, - ) - - session.install("-e", ".", "-c", constraints_path) + install_unittest_dependencies(session, "-c", constraints_path) # Run py.test against the unit tests. session.run( @@ -121,6 +163,35 @@ def unit(session): default(session) +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. 
+ session.install("--pre", "grpcio") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" @@ -143,13 +214,7 @@ def system(session): if not system_test_exists and not system_test_folder_exists: session.skip("System tests were not found") - # Use pre-release gRPC for system tests. - session.install("--pre", "grpcio") - - # Install all test dependencies, then install this package into the - # virtualenv's dist-packages. - session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path) - session.install("-e", ".", "-c", constraints_path) + install_systemtest_dependencies(session, "-c", constraints_path) # Run py.test against the system tests. 
if system_test_exists: From eb3e07111faaadab61c9ab244a01e72ffbe7e999 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 6 Apr 2022 07:05:24 -0400 Subject: [PATCH 096/159] chore(python): add license header to auto-label.yaml (#163) Source-Link: https://github.com/googleapis/synthtool/commit/eb78c980b52c7c6746d2edb77d9cf7aaa99a2aab Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:8a5d3f6a2e43ed8293f34e06a2f56931d1e88a2694c3bb11b15df4eb256ad163 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- .github/auto-label.yaml | 13 +++++++++++++ 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index fa57622..bc893c9 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:1894490910e891a385484514b22eb5133578897eb5b3c380e6d8ad475c6647cd -# created: 2022-04-01T15:48:07.524222836Z + digest: sha256:8a5d3f6a2e43ed8293f34e06a2f56931d1e88a2694c3bb11b15df4eb256ad163 +# created: 2022-04-06T10:30:21.687684602Z diff --git a/.github/auto-label.yaml b/.github/auto-label.yaml index 09c8d73..41bff0b 100644 --- a/.github/auto-label.yaml +++ b/.github/auto-label.yaml @@ -1,2 +1,15 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
requestsize: enabled: true From 87907dfada2c7973c6247b03682b1de43571d313 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 14 Apr 2022 07:21:17 -0400 Subject: [PATCH 097/159] chore: use gapic-generator-python 0.65.1 (#167) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: use gapic-generator-python 0.65.1 PiperOrigin-RevId: 441524537 Source-Link: https://github.com/googleapis/googleapis/commit/2a273915b3f70fe86c9d2a75470a0b83e48d0abf Source-Link: https://github.com/googleapis/googleapis-gen/commit/ab6756a48c89b5bcb9fb73443cb8e55d574f4643 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYWI2NzU2YTQ4Yzg5YjViY2I5ZmI3MzQ0M2NiOGU1NWQ1NzRmNDY0MyJ9 * 🩉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../services/cloud_memcache/async_client.py | 5 +- .../services/cloud_memcache/client.py | 5 +- .../cloud_memcache/transports/base.py | 5 + .../cloud_memcache/transports/grpc.py | 4 + .../cloud/memcache_v1/types/cloud_memcache.py | 4 +- .../services/cloud_memcache/async_client.py | 6 +- .../services/cloud_memcache/client.py | 6 +- .../cloud_memcache/transports/base.py | 5 + .../cloud_memcache/transports/grpc.py | 4 + .../memcache_v1beta2/types/cloud_memcache.py | 6 +- .../snippet_metadata_memcache_v1.json | 626 ++++++++++++++- .../snippet_metadata_memcache_v1beta2.json | 722 +++++++++++++++++- .../gapic/memcache_v1/test_cloud_memcache.py | 81 +- .../memcache_v1beta2/test_cloud_memcache.py | 81 +- 14 files changed, 1439 insertions(+), 121 deletions(-) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py index 19d6158..501b5f6 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py +++ 
b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -591,7 +591,6 @@ async def update_instance( r"""Updates an existing Instance in a given project and location. - .. code-block:: python from google.cloud import memcache_v1 @@ -724,7 +723,6 @@ async def update_parameters( parameters, it must be followed by ApplyParameters to apply the parameters to nodes of the Memcached Instance. - .. code-block:: python from google.cloud import memcache_v1 @@ -977,7 +975,6 @@ async def apply_parameters( nodes in order to update them to the current set of parameters for the Memcached Instance. - .. code-block:: python from google.cloud import memcache_v1 diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py index 180b4ba..53fc028 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib @@ -813,7 +813,6 @@ def update_instance( r"""Updates an existing Instance in a given project and location. - .. code-block:: python from google.cloud import memcache_v1 @@ -946,7 +945,6 @@ def update_parameters( parameters, it must be followed by ApplyParameters to apply the parameters to nodes of the Memcached Instance. - .. 
code-block:: python from google.cloud import memcache_v1 @@ -1199,7 +1197,6 @@ def apply_parameters( nodes in order to update them to the current set of parameters for the Memcached Instance. - .. code-block:: python from google.cloud import memcache_v1 diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py index 3549659..1272f2d 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py @@ -82,6 +82,7 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: host += ":443" @@ -239,5 +240,9 @@ def apply_parameters( ]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ("CloudMemcacheTransport",) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py index a470b86..c5d8f32 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py @@ -455,5 +455,9 @@ def apply_parameters( def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + __all__ = ("CloudMemcacheGrpcTransport",) diff --git a/google/cloud/memcache_v1/types/cloud_memcache.py b/google/cloud/memcache_v1/types/cloud_memcache.py index c002c1b..bfc1f63 100644 --- a/google/cloud/memcache_v1/types/cloud_memcache.py +++ b/google/cloud/memcache_v1/types/cloud_memcache.py @@ -62,7 +62,7 @@ class Instance(proto.Message): User provided name for the instance only used for display purposes. Cannot be more than 80 characters. 
- labels (Sequence[google.cloud.memcache_v1.types.Instance.LabelsEntry]): + labels (Mapping[str, str]): Resource labels to represent user-provided metadata. Refer to cloud documentation on labels for more details. @@ -578,7 +578,7 @@ class MemcacheParameters(proto.Message): instance differ from the parameters associated with the nodes and any action needs to be taken to apply parameters on nodes. - params (Sequence[google.cloud.memcache_v1.types.MemcacheParameters.ParamsEntry]): + params (Mapping[str, str]): User defined set of parameters to use in the memcached process. """ diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py index b6d968a..912b38c 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -592,7 +592,6 @@ async def update_instance( r"""Updates an existing Instance in a given project and location. - .. code-block:: python from google.cloud import memcache_v1beta2 @@ -726,7 +725,6 @@ async def update_parameters( followed by ``ApplyParameters`` to apply the parameters to nodes of the Memcached instance. - .. code-block:: python from google.cloud import memcache_v1beta2 @@ -980,7 +978,6 @@ async def apply_parameters( to update them to the current set of parameters for the Memcached Instance. - .. code-block:: python from google.cloud import memcache_v1beta2 @@ -1115,7 +1112,6 @@ async def apply_software_update( r"""Updates software on the selected nodes of the Instance. - .. 
code-block:: python from google.cloud import memcache_v1beta2 diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index f245684..396803f 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib @@ -814,7 +814,6 @@ def update_instance( r"""Updates an existing Instance in a given project and location. - .. code-block:: python from google.cloud import memcache_v1beta2 @@ -948,7 +947,6 @@ def update_parameters( followed by ``ApplyParameters`` to apply the parameters to nodes of the Memcached instance. - .. code-block:: python from google.cloud import memcache_v1beta2 @@ -1202,7 +1200,6 @@ def apply_parameters( to update them to the current set of parameters for the Memcached Instance. - .. code-block:: python from google.cloud import memcache_v1beta2 @@ -1337,7 +1334,6 @@ def apply_software_update( r"""Updates software on the selected nodes of the Instance. - .. code-block:: python from google.cloud import memcache_v1beta2 diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py index 479ea37..0eca00e 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py @@ -82,6 +82,7 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
if ":" not in host: host += ":443" @@ -253,5 +254,9 @@ def apply_software_update( ]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ("CloudMemcacheTransport",) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py index 6dffaf3..8ac71ba 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py @@ -484,5 +484,9 @@ def apply_software_update( def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + __all__ = ("CloudMemcacheGrpcTransport",) diff --git a/google/cloud/memcache_v1beta2/types/cloud_memcache.py b/google/cloud/memcache_v1beta2/types/cloud_memcache.py index 413d6cf..affb84e 100644 --- a/google/cloud/memcache_v1beta2/types/cloud_memcache.py +++ b/google/cloud/memcache_v1beta2/types/cloud_memcache.py @@ -67,7 +67,7 @@ class Instance(proto.Message): User provided name for the instance, which is only used for display purposes. Cannot be more than 80 characters. - labels (Sequence[google.cloud.memcache_v1beta2.types.Instance.LabelsEntry]): + labels (Mapping[str, str]): Resource labels to represent user-provided metadata. Refer to cloud documentation on labels for more details. @@ -631,7 +631,7 @@ class MemcacheParameters(proto.Message): Attributes: id (str): Output only. - params (Sequence[google.cloud.memcache_v1beta2.types.MemcacheParameters.ParamsEntry]): + params (Mapping[str, str]): User defined set of parameters to use in the memcached process. """ @@ -714,7 +714,7 @@ class LocationMetadata(proto.Message): [google.cloud.location.Location][google.cloud.location.Location]. 
Attributes: - available_zones (Sequence[google.cloud.memcache_v1beta2.types.LocationMetadata.AvailableZonesEntry]): + available_zones (Mapping[str, google.cloud.memcache_v1beta2.types.ZoneMetadata]): Output only. The set of available zones in the location. The map is keyed by the lowercase ID of each zone, as defined by GCE. These keys can be specified in the ``zones`` field when diff --git a/samples/generated_samples/snippet_metadata_memcache_v1.json b/samples/generated_samples/snippet_metadata_memcache_v1.json index a8d58f6..754496a 100644 --- a/samples/generated_samples/snippet_metadata_memcache_v1.json +++ b/samples/generated_samples/snippet_metadata_memcache_v1.json @@ -1,16 +1,69 @@ { + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.memcache.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-memcache" + }, "snippets": [ { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient", + "shortName": "CloudMemcacheAsyncClient" + }, + "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient.apply_parameters", "method": { + "fullName": "google.cloud.memcache.v1.CloudMemcache.ApplyParameters", "service": { + "fullName": "google.cloud.memcache.v1.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "ApplyParameters" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1.types.ApplyParametersRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "node_ids", + "type": "Sequence[str]" + }, + { + "name": "apply_all", + "type": "bool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "apply_parameters" }, + "description": "Sample for ApplyParameters", "file": 
"memcache_v1_generated_cloud_memcache_apply_parameters_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "memcache_v1_generated_CloudMemcache_ApplyParameters_async", "segments": [ { @@ -43,18 +96,62 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "memcache_v1_generated_cloud_memcache_apply_parameters_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.memcache_v1.CloudMemcacheClient", + "shortName": "CloudMemcacheClient" + }, + "fullName": "google.cloud.memcache_v1.CloudMemcacheClient.apply_parameters", "method": { + "fullName": "google.cloud.memcache.v1.CloudMemcache.ApplyParameters", "service": { + "fullName": "google.cloud.memcache.v1.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "ApplyParameters" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1.types.ApplyParametersRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "node_ids", + "type": "Sequence[str]" + }, + { + "name": "apply_all", + "type": "bool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "apply_parameters" }, + "description": "Sample for ApplyParameters", "file": "memcache_v1_generated_cloud_memcache_apply_parameters_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "memcache_v1_generated_CloudMemcache_ApplyParameters_sync", "segments": [ { @@ -87,19 +184,63 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "memcache_v1_generated_cloud_memcache_apply_parameters_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient", + "shortName": "CloudMemcacheAsyncClient" + }, + "fullName": 
"google.cloud.memcache_v1.CloudMemcacheAsyncClient.create_instance", "method": { + "fullName": "google.cloud.memcache.v1.CloudMemcache.CreateInstance", "service": { + "fullName": "google.cloud.memcache.v1.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "CreateInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1.types.CreateInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance", + "type": "google.cloud.memcache_v1.types.Instance" + }, + { + "name": "instance_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_instance" }, + "description": "Sample for CreateInstance", "file": "memcache_v1_generated_cloud_memcache_create_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "memcache_v1_generated_CloudMemcache_CreateInstance_async", "segments": [ { @@ -132,18 +273,62 @@ "start": 54, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "memcache_v1_generated_cloud_memcache_create_instance_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.memcache_v1.CloudMemcacheClient", + "shortName": "CloudMemcacheClient" + }, + "fullName": "google.cloud.memcache_v1.CloudMemcacheClient.create_instance", "method": { + "fullName": "google.cloud.memcache.v1.CloudMemcache.CreateInstance", "service": { + "fullName": "google.cloud.memcache.v1.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "CreateInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1.types.CreateInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance", + "type": "google.cloud.memcache_v1.types.Instance" + }, + { + 
"name": "instance_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_instance" }, + "description": "Sample for CreateInstance", "file": "memcache_v1_generated_cloud_memcache_create_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "memcache_v1_generated_CloudMemcache_CreateInstance_sync", "segments": [ { @@ -176,19 +361,55 @@ "start": 54, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "memcache_v1_generated_cloud_memcache_create_instance_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient", + "shortName": "CloudMemcacheAsyncClient" + }, + "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient.delete_instance", "method": { + "fullName": "google.cloud.memcache.v1.CloudMemcache.DeleteInstance", "service": { + "fullName": "google.cloud.memcache.v1.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "DeleteInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1.types.DeleteInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_instance" }, + "description": "Sample for DeleteInstance", "file": "memcache_v1_generated_cloud_memcache_delete_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "memcache_v1_generated_CloudMemcache_DeleteInstance_async", "segments": [ { @@ -221,18 +442,54 @@ "start": 46, "type": "RESPONSE_HANDLING" } - 
] + ], + "title": "memcache_v1_generated_cloud_memcache_delete_instance_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.memcache_v1.CloudMemcacheClient", + "shortName": "CloudMemcacheClient" + }, + "fullName": "google.cloud.memcache_v1.CloudMemcacheClient.delete_instance", "method": { + "fullName": "google.cloud.memcache.v1.CloudMemcache.DeleteInstance", "service": { + "fullName": "google.cloud.memcache.v1.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "DeleteInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1.types.DeleteInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_instance" }, + "description": "Sample for DeleteInstance", "file": "memcache_v1_generated_cloud_memcache_delete_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "memcache_v1_generated_CloudMemcache_DeleteInstance_sync", "segments": [ { @@ -265,19 +522,55 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "memcache_v1_generated_cloud_memcache_delete_instance_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient", + "shortName": "CloudMemcacheAsyncClient" + }, + "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient.get_instance", "method": { + "fullName": "google.cloud.memcache.v1.CloudMemcache.GetInstance", "service": { + "fullName": "google.cloud.memcache.v1.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "GetInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1.types.GetInstanceRequest" + }, + { + "name": 
"name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.memcache_v1.types.Instance", + "shortName": "get_instance" }, + "description": "Sample for GetInstance", "file": "memcache_v1_generated_cloud_memcache_get_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "memcache_v1_generated_CloudMemcache_GetInstance_async", "segments": [ { @@ -310,18 +603,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "memcache_v1_generated_cloud_memcache_get_instance_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.memcache_v1.CloudMemcacheClient", + "shortName": "CloudMemcacheClient" + }, + "fullName": "google.cloud.memcache_v1.CloudMemcacheClient.get_instance", "method": { + "fullName": "google.cloud.memcache.v1.CloudMemcache.GetInstance", "service": { + "fullName": "google.cloud.memcache.v1.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "GetInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1.types.GetInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.memcache_v1.types.Instance", + "shortName": "get_instance" }, + "description": "Sample for GetInstance", "file": "memcache_v1_generated_cloud_memcache_get_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "memcache_v1_generated_CloudMemcache_GetInstance_sync", "segments": [ { @@ -354,19 +683,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "memcache_v1_generated_cloud_memcache_get_instance_sync.py" }, { + 
"canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient", + "shortName": "CloudMemcacheAsyncClient" + }, + "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient.list_instances", "method": { + "fullName": "google.cloud.memcache.v1.CloudMemcache.ListInstances", "service": { + "fullName": "google.cloud.memcache.v1.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "ListInstances" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1.types.ListInstancesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.memcache_v1.services.cloud_memcache.pagers.ListInstancesAsyncPager", + "shortName": "list_instances" }, + "description": "Sample for ListInstances", "file": "memcache_v1_generated_cloud_memcache_list_instances_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "memcache_v1_generated_CloudMemcache_ListInstances_async", "segments": [ { @@ -399,18 +764,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "memcache_v1_generated_cloud_memcache_list_instances_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.memcache_v1.CloudMemcacheClient", + "shortName": "CloudMemcacheClient" + }, + "fullName": "google.cloud.memcache_v1.CloudMemcacheClient.list_instances", "method": { + "fullName": "google.cloud.memcache.v1.CloudMemcache.ListInstances", "service": { + "fullName": "google.cloud.memcache.v1.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "ListInstances" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1.types.ListInstancesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": 
"retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.memcache_v1.services.cloud_memcache.pagers.ListInstancesPager", + "shortName": "list_instances" }, + "description": "Sample for ListInstances", "file": "memcache_v1_generated_cloud_memcache_list_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "memcache_v1_generated_CloudMemcache_ListInstances_sync", "segments": [ { @@ -443,19 +844,59 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "memcache_v1_generated_cloud_memcache_list_instances_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient", + "shortName": "CloudMemcacheAsyncClient" + }, + "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient.update_instance", "method": { + "fullName": "google.cloud.memcache.v1.CloudMemcache.UpdateInstance", "service": { + "fullName": "google.cloud.memcache.v1.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "UpdateInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1.types.UpdateInstanceRequest" + }, + { + "name": "instance", + "type": "google.cloud.memcache_v1.types.Instance" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_instance" }, + "description": "Sample for UpdateInstance", "file": "memcache_v1_generated_cloud_memcache_update_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": 
"memcache_v1_generated_CloudMemcache_UpdateInstance_async", "segments": [ { @@ -488,18 +929,58 @@ "start": 52, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "memcache_v1_generated_cloud_memcache_update_instance_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.memcache_v1.CloudMemcacheClient", + "shortName": "CloudMemcacheClient" + }, + "fullName": "google.cloud.memcache_v1.CloudMemcacheClient.update_instance", "method": { + "fullName": "google.cloud.memcache.v1.CloudMemcache.UpdateInstance", "service": { + "fullName": "google.cloud.memcache.v1.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "UpdateInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1.types.UpdateInstanceRequest" + }, + { + "name": "instance", + "type": "google.cloud.memcache_v1.types.Instance" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_instance" }, + "description": "Sample for UpdateInstance", "file": "memcache_v1_generated_cloud_memcache_update_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "memcache_v1_generated_CloudMemcache_UpdateInstance_sync", "segments": [ { @@ -532,19 +1013,63 @@ "start": 52, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "memcache_v1_generated_cloud_memcache_update_instance_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient", + "shortName": "CloudMemcacheAsyncClient" + }, + "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient.update_parameters", "method": { + "fullName": 
"google.cloud.memcache.v1.CloudMemcache.UpdateParameters", "service": { + "fullName": "google.cloud.memcache.v1.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "UpdateParameters" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1.types.UpdateParametersRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "parameters", + "type": "google.cloud.memcache_v1.types.MemcacheParameters" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_parameters" }, + "description": "Sample for UpdateParameters", "file": "memcache_v1_generated_cloud_memcache_update_parameters_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "memcache_v1_generated_CloudMemcache_UpdateParameters_async", "segments": [ { @@ -577,18 +1102,62 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "memcache_v1_generated_cloud_memcache_update_parameters_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.memcache_v1.CloudMemcacheClient", + "shortName": "CloudMemcacheClient" + }, + "fullName": "google.cloud.memcache_v1.CloudMemcacheClient.update_parameters", "method": { + "fullName": "google.cloud.memcache.v1.CloudMemcache.UpdateParameters", "service": { + "fullName": "google.cloud.memcache.v1.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "UpdateParameters" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1.types.UpdateParametersRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "parameters", + 
"type": "google.cloud.memcache_v1.types.MemcacheParameters" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_parameters" }, + "description": "Sample for UpdateParameters", "file": "memcache_v1_generated_cloud_memcache_update_parameters_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "memcache_v1_generated_CloudMemcache_UpdateParameters_sync", "segments": [ { @@ -621,7 +1190,8 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "memcache_v1_generated_cloud_memcache_update_parameters_sync.py" } ] } diff --git a/samples/generated_samples/snippet_metadata_memcache_v1beta2.json b/samples/generated_samples/snippet_metadata_memcache_v1beta2.json index a62f8ef..d2dd1a7 100644 --- a/samples/generated_samples/snippet_metadata_memcache_v1beta2.json +++ b/samples/generated_samples/snippet_metadata_memcache_v1beta2.json @@ -1,16 +1,69 @@ { + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.memcache.v1beta2", + "version": "v1beta2" + } + ], + "language": "PYTHON", + "name": "google-cloud-memcache" + }, "snippets": [ { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient", + "shortName": "CloudMemcacheAsyncClient" + }, + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient.apply_parameters", "method": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.ApplyParameters", "service": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "ApplyParameters" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1beta2.types.ApplyParametersRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "node_ids", + 
"type": "Sequence[str]" + }, + { + "name": "apply_all", + "type": "bool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "apply_parameters" }, + "description": "Sample for ApplyParameters", "file": "memcache_v1beta2_generated_cloud_memcache_apply_parameters_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "memcache_v1beta2_generated_CloudMemcache_ApplyParameters_async", "segments": [ { @@ -43,18 +96,62 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "memcache_v1beta2_generated_cloud_memcache_apply_parameters_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient", + "shortName": "CloudMemcacheClient" + }, + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient.apply_parameters", "method": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.ApplyParameters", "service": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "ApplyParameters" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1beta2.types.ApplyParametersRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "node_ids", + "type": "Sequence[str]" + }, + { + "name": "apply_all", + "type": "bool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "apply_parameters" }, + "description": "Sample for ApplyParameters", "file": "memcache_v1beta2_generated_cloud_memcache_apply_parameters_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", "regionTag": "memcache_v1beta2_generated_CloudMemcache_ApplyParameters_sync", "segments": [ { @@ -87,19 +184,63 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "memcache_v1beta2_generated_cloud_memcache_apply_parameters_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient", + "shortName": "CloudMemcacheAsyncClient" + }, + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient.apply_software_update", "method": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.ApplySoftwareUpdate", "service": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "ApplySoftwareUpdate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1beta2.types.ApplySoftwareUpdateRequest" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "node_ids", + "type": "Sequence[str]" + }, + { + "name": "apply_all", + "type": "bool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "apply_software_update" }, + "description": "Sample for ApplySoftwareUpdate", "file": "memcache_v1beta2_generated_cloud_memcache_apply_software_update_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "memcache_v1beta2_generated_CloudMemcache_ApplySoftwareUpdate_async", "segments": [ { @@ -132,18 +273,62 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "memcache_v1beta2_generated_cloud_memcache_apply_software_update_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient", + "shortName": "CloudMemcacheClient" + }, + "fullName": 
"google.cloud.memcache_v1beta2.CloudMemcacheClient.apply_software_update", "method": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.ApplySoftwareUpdate", "service": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "ApplySoftwareUpdate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1beta2.types.ApplySoftwareUpdateRequest" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "node_ids", + "type": "Sequence[str]" + }, + { + "name": "apply_all", + "type": "bool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "apply_software_update" }, + "description": "Sample for ApplySoftwareUpdate", "file": "memcache_v1beta2_generated_cloud_memcache_apply_software_update_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "memcache_v1beta2_generated_CloudMemcache_ApplySoftwareUpdate_sync", "segments": [ { @@ -176,19 +361,63 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "memcache_v1beta2_generated_cloud_memcache_apply_software_update_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient", + "shortName": "CloudMemcacheAsyncClient" + }, + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient.create_instance", "method": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.CreateInstance", "service": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "CreateInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1beta2.types.CreateInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, 
+ { + "name": "instance_id", + "type": "str" + }, + { + "name": "resource", + "type": "google.cloud.memcache_v1beta2.types.Instance" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_instance" }, + "description": "Sample for CreateInstance", "file": "memcache_v1beta2_generated_cloud_memcache_create_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "memcache_v1beta2_generated_CloudMemcache_CreateInstance_async", "segments": [ { @@ -221,18 +450,62 @@ "start": 54, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "memcache_v1beta2_generated_cloud_memcache_create_instance_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient", + "shortName": "CloudMemcacheClient" + }, + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient.create_instance", "method": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.CreateInstance", "service": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "CreateInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1beta2.types.CreateInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance_id", + "type": "str" + }, + { + "name": "resource", + "type": "google.cloud.memcache_v1beta2.types.Instance" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_instance" }, + "description": "Sample for CreateInstance", "file": 
"memcache_v1beta2_generated_cloud_memcache_create_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "memcache_v1beta2_generated_CloudMemcache_CreateInstance_sync", "segments": [ { @@ -265,19 +538,55 @@ "start": 54, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "memcache_v1beta2_generated_cloud_memcache_create_instance_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient", + "shortName": "CloudMemcacheAsyncClient" + }, + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient.delete_instance", "method": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.DeleteInstance", "service": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "DeleteInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1beta2.types.DeleteInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_instance" }, + "description": "Sample for DeleteInstance", "file": "memcache_v1beta2_generated_cloud_memcache_delete_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "memcache_v1beta2_generated_CloudMemcache_DeleteInstance_async", "segments": [ { @@ -310,18 +619,54 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "memcache_v1beta2_generated_cloud_memcache_delete_instance_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient", + "shortName": "CloudMemcacheClient" + }, + "fullName": 
"google.cloud.memcache_v1beta2.CloudMemcacheClient.delete_instance", "method": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.DeleteInstance", "service": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "DeleteInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1beta2.types.DeleteInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_instance" }, + "description": "Sample for DeleteInstance", "file": "memcache_v1beta2_generated_cloud_memcache_delete_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "memcache_v1beta2_generated_CloudMemcache_DeleteInstance_sync", "segments": [ { @@ -354,19 +699,55 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "memcache_v1beta2_generated_cloud_memcache_delete_instance_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient", + "shortName": "CloudMemcacheAsyncClient" + }, + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient.get_instance", "method": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.GetInstance", "service": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "GetInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1beta2.types.GetInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, 
str]" + } + ], + "resultType": "google.cloud.memcache_v1beta2.types.Instance", + "shortName": "get_instance" }, + "description": "Sample for GetInstance", "file": "memcache_v1beta2_generated_cloud_memcache_get_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "memcache_v1beta2_generated_CloudMemcache_GetInstance_async", "segments": [ { @@ -399,18 +780,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "memcache_v1beta2_generated_cloud_memcache_get_instance_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient", + "shortName": "CloudMemcacheClient" + }, + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient.get_instance", "method": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.GetInstance", "service": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "GetInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1beta2.types.GetInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.memcache_v1beta2.types.Instance", + "shortName": "get_instance" }, + "description": "Sample for GetInstance", "file": "memcache_v1beta2_generated_cloud_memcache_get_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "memcache_v1beta2_generated_CloudMemcache_GetInstance_sync", "segments": [ { @@ -443,19 +860,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "memcache_v1beta2_generated_cloud_memcache_get_instance_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient", 
+ "shortName": "CloudMemcacheAsyncClient" + }, + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient.list_instances", "method": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.ListInstances", "service": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "ListInstances" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1beta2.types.ListInstancesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.memcache_v1beta2.services.cloud_memcache.pagers.ListInstancesAsyncPager", + "shortName": "list_instances" }, + "description": "Sample for ListInstances", "file": "memcache_v1beta2_generated_cloud_memcache_list_instances_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "memcache_v1beta2_generated_CloudMemcache_ListInstances_async", "segments": [ { @@ -488,18 +941,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "memcache_v1beta2_generated_cloud_memcache_list_instances_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient", + "shortName": "CloudMemcacheClient" + }, + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient.list_instances", "method": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.ListInstances", "service": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "ListInstances" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1beta2.types.ListInstancesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.memcache_v1beta2.services.cloud_memcache.pagers.ListInstancesPager", + "shortName": "list_instances" }, + "description": "Sample for ListInstances", "file": "memcache_v1beta2_generated_cloud_memcache_list_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "memcache_v1beta2_generated_CloudMemcache_ListInstances_sync", "segments": [ { @@ -532,19 +1021,59 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "memcache_v1beta2_generated_cloud_memcache_list_instances_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient", + "shortName": "CloudMemcacheAsyncClient" + }, + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient.update_instance", "method": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.UpdateInstance", "service": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "UpdateInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1beta2.types.UpdateInstanceRequest" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "resource", + "type": "google.cloud.memcache_v1beta2.types.Instance" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_instance" }, + "description": "Sample for UpdateInstance", "file": "memcache_v1beta2_generated_cloud_memcache_update_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": 
"memcache_v1beta2_generated_CloudMemcache_UpdateInstance_async", "segments": [ { @@ -577,18 +1106,58 @@ "start": 52, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "memcache_v1beta2_generated_cloud_memcache_update_instance_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient", + "shortName": "CloudMemcacheClient" + }, + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient.update_instance", "method": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.UpdateInstance", "service": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "UpdateInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1beta2.types.UpdateInstanceRequest" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "resource", + "type": "google.cloud.memcache_v1beta2.types.Instance" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_instance" }, + "description": "Sample for UpdateInstance", "file": "memcache_v1beta2_generated_cloud_memcache_update_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "memcache_v1beta2_generated_CloudMemcache_UpdateInstance_sync", "segments": [ { @@ -621,19 +1190,63 @@ "start": 52, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "memcache_v1beta2_generated_cloud_memcache_update_instance_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient", + "shortName": "CloudMemcacheAsyncClient" + }, + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient.update_parameters", 
"method": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.UpdateParameters", "service": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "UpdateParameters" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1beta2.types.UpdateParametersRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "parameters", + "type": "google.cloud.memcache_v1beta2.types.MemcacheParameters" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_parameters" }, + "description": "Sample for UpdateParameters", "file": "memcache_v1beta2_generated_cloud_memcache_update_parameters_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "memcache_v1beta2_generated_CloudMemcache_UpdateParameters_async", "segments": [ { @@ -666,18 +1279,62 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "memcache_v1beta2_generated_cloud_memcache_update_parameters_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient", + "shortName": "CloudMemcacheClient" + }, + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient.update_parameters", "method": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.UpdateParameters", "service": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", "shortName": "CloudMemcache" }, "shortName": "UpdateParameters" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1beta2.types.UpdateParametersRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "update_mask", + 
"type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "parameters", + "type": "google.cloud.memcache_v1beta2.types.MemcacheParameters" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_parameters" }, + "description": "Sample for UpdateParameters", "file": "memcache_v1beta2_generated_cloud_memcache_update_parameters_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "memcache_v1beta2_generated_CloudMemcache_UpdateParameters_sync", "segments": [ { @@ -710,7 +1367,8 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "memcache_v1beta2_generated_cloud_memcache_update_parameters_sync.py" } ] } diff --git a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py index 7ad6060..f41ad7e 100644 --- a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -92,24 +92,24 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - CloudMemcacheClient, - CloudMemcacheAsyncClient, + (CloudMemcacheClient, "grpc"), + (CloudMemcacheAsyncClient, "grpc_asyncio"), ], ) -def test_cloud_memcache_client_from_service_account_info(client_class): +def test_cloud_memcache_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert 
client.transport._host == "memcache.googleapis.com:443" + assert client.transport._host == ("memcache.googleapis.com:443") @pytest.mark.parametrize( @@ -138,27 +138,31 @@ def test_cloud_memcache_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - CloudMemcacheClient, - CloudMemcacheAsyncClient, + (CloudMemcacheClient, "grpc"), + (CloudMemcacheAsyncClient, "grpc_asyncio"), ], ) -def test_cloud_memcache_client_from_service_account_file(client_class): +def test_cloud_memcache_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "memcache.googleapis.com:443" + assert client.transport._host == ("memcache.googleapis.com:443") def test_cloud_memcache_client_get_transport_class(): @@ -1018,7 +1022,7 @@ async def test_list_instances_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1064,7 +1068,9 @@ async def test_list_instances_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_instances(request={})).pages: + async for page_ in ( + await 
client.list_instances(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2639,6 +2645,19 @@ def test_transport_adc(transport_class): adc.assert_called_once() +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = CloudMemcacheClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = CloudMemcacheClient( @@ -2692,6 +2711,14 @@ def test_cloud_memcache_base_transport(): with pytest.raises(NotImplementedError): transport.operations_client + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_cloud_memcache_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -2837,24 +2864,40 @@ def test_cloud_memcache_grpc_transport_client_cert_source_for_mtls(transport_cla ) -def test_cloud_memcache_host_no_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_cloud_memcache_host_no_port(transport_name): client = CloudMemcacheClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="memcache.googleapis.com" ), + transport=transport_name, ) - assert client.transport._host == "memcache.googleapis.com:443" + assert client.transport._host == ("memcache.googleapis.com:443") -def test_cloud_memcache_host_with_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_cloud_memcache_host_with_port(transport_name): client = CloudMemcacheClient( 
credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="memcache.googleapis.com:8000" ), + transport=transport_name, ) - assert client.transport._host == "memcache.googleapis.com:8000" + assert client.transport._host == ("memcache.googleapis.com:8000") def test_cloud_memcache_grpc_transport_channel(): diff --git a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index c57d3b5..d02246e 100644 --- a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -94,24 +94,24 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - CloudMemcacheClient, - CloudMemcacheAsyncClient, + (CloudMemcacheClient, "grpc"), + (CloudMemcacheAsyncClient, "grpc_asyncio"), ], ) -def test_cloud_memcache_client_from_service_account_info(client_class): +def test_cloud_memcache_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "memcache.googleapis.com:443" + assert client.transport._host == ("memcache.googleapis.com:443") @pytest.mark.parametrize( @@ -140,27 +140,31 @@ def test_cloud_memcache_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - CloudMemcacheClient, - CloudMemcacheAsyncClient, + (CloudMemcacheClient, "grpc"), + (CloudMemcacheAsyncClient, "grpc_asyncio"), ], ) -def 
test_cloud_memcache_client_from_service_account_file(client_class): +def test_cloud_memcache_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "memcache.googleapis.com:443" + assert client.transport._host == ("memcache.googleapis.com:443") def test_cloud_memcache_client_get_transport_class(): @@ -1020,7 +1024,7 @@ async def test_list_instances_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1066,7 +1070,9 @@ async def test_list_instances_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_instances(request={})).pages: + async for page_ in ( + await client.list_instances(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2906,6 +2912,19 @@ def test_transport_adc(transport_class): adc.assert_called_once() +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = CloudMemcacheClient.get_transport_class(transport_name)( + 
credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = CloudMemcacheClient( @@ -2960,6 +2979,14 @@ def test_cloud_memcache_base_transport(): with pytest.raises(NotImplementedError): transport.operations_client + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_cloud_memcache_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -3105,24 +3132,40 @@ def test_cloud_memcache_grpc_transport_client_cert_source_for_mtls(transport_cla ) -def test_cloud_memcache_host_no_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_cloud_memcache_host_no_port(transport_name): client = CloudMemcacheClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="memcache.googleapis.com" ), + transport=transport_name, ) - assert client.transport._host == "memcache.googleapis.com:443" + assert client.transport._host == ("memcache.googleapis.com:443") -def test_cloud_memcache_host_with_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_cloud_memcache_host_with_port(transport_name): client = CloudMemcacheClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="memcache.googleapis.com:8000" ), + transport=transport_name, ) - assert client.transport._host == "memcache.googleapis.com:8000" + assert client.transport._host == ("memcache.googleapis.com:8000") def test_cloud_memcache_grpc_transport_channel(): From e7a39829fc3b06ce1c127a593e98f02f6e94b786 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> 
Date: Wed, 20 Apr 2022 21:11:58 -0400 Subject: [PATCH 098/159] chore(python): add nox session to sort python imports (#168) Source-Link: https://github.com/googleapis/synthtool/commit/1b71c10e20de7ed3f97f692f99a0e3399b67049f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:00c9d764fd1cd56265f12a5ef4b99a0c9e87cf261018099141e2ca5158890416 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 +- docs/conf.py | 2 +- google/cloud/memcache/__init__.py | 29 ++++++------ google/cloud/memcache_v1/__init__.py | 30 ++++++------ .../services/cloud_memcache/__init__.py | 2 +- .../services/cloud_memcache/async_client.py | 14 +++--- .../services/cloud_memcache/client.py | 12 +++-- .../services/cloud_memcache/pagers.py | 4 +- .../cloud_memcache/transports/__init__.py | 1 - .../cloud_memcache/transports/base.py | 9 ++-- .../cloud_memcache/transports/grpc.py | 12 ++--- .../cloud_memcache/transports/grpc_asyncio.py | 12 ++--- google/cloud/memcache_v1/types/__init__.py | 2 +- .../cloud/memcache_v1/types/cloud_memcache.py | 4 +- google/cloud/memcache_v1beta2/__init__.py | 36 +++++++------- .../services/cloud_memcache/__init__.py | 2 +- .../services/cloud_memcache/async_client.py | 14 +++--- .../services/cloud_memcache/client.py | 12 +++-- .../services/cloud_memcache/pagers.py | 4 +- .../cloud_memcache/transports/__init__.py | 1 - .../cloud_memcache/transports/base.py | 9 ++-- .../cloud_memcache/transports/grpc.py | 12 ++--- .../cloud_memcache/transports/grpc_asyncio.py | 12 ++--- .../cloud/memcache_v1beta2/types/__init__.py | 2 +- .../memcache_v1beta2/types/cloud_memcache.py | 4 +- noxfile.py | 28 +++++++++-- setup.py | 1 + .../gapic/memcache_v1/test_cloud_memcache.py | 45 +++++++++--------- .../memcache_v1beta2/test_cloud_memcache.py | 47 ++++++++++--------- 29 files changed, 193 insertions(+), 173 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index bc893c9..7c454ab 100644 --- a/.github/.OwlBot.lock.yaml +++ 
b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:8a5d3f6a2e43ed8293f34e06a2f56931d1e88a2694c3bb11b15df4eb256ad163 -# created: 2022-04-06T10:30:21.687684602Z + digest: sha256:00c9d764fd1cd56265f12a5ef4b99a0c9e87cf261018099141e2ca5158890416 +# created: 2022-04-20T23:42:53.970438194Z diff --git a/docs/conf.py b/docs/conf.py index 7e01871..1f40ca9 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -24,9 +24,9 @@ # All configuration values have a default; values that are commented out # serve to show the default. -import sys import os import shlex +import sys # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the diff --git a/google/cloud/memcache/__init__.py b/google/cloud/memcache/__init__.py index 9c33853..0728147 100644 --- a/google/cloud/memcache/__init__.py +++ b/google/cloud/memcache/__init__.py @@ -14,23 +14,24 @@ # limitations under the License. 
# -from google.cloud.memcache_v1.services.cloud_memcache.client import CloudMemcacheClient from google.cloud.memcache_v1.services.cloud_memcache.async_client import ( CloudMemcacheAsyncClient, ) - -from google.cloud.memcache_v1.types.cloud_memcache import ApplyParametersRequest -from google.cloud.memcache_v1.types.cloud_memcache import CreateInstanceRequest -from google.cloud.memcache_v1.types.cloud_memcache import DeleteInstanceRequest -from google.cloud.memcache_v1.types.cloud_memcache import GetInstanceRequest -from google.cloud.memcache_v1.types.cloud_memcache import Instance -from google.cloud.memcache_v1.types.cloud_memcache import ListInstancesRequest -from google.cloud.memcache_v1.types.cloud_memcache import ListInstancesResponse -from google.cloud.memcache_v1.types.cloud_memcache import MemcacheParameters -from google.cloud.memcache_v1.types.cloud_memcache import OperationMetadata -from google.cloud.memcache_v1.types.cloud_memcache import UpdateInstanceRequest -from google.cloud.memcache_v1.types.cloud_memcache import UpdateParametersRequest -from google.cloud.memcache_v1.types.cloud_memcache import MemcacheVersion +from google.cloud.memcache_v1.services.cloud_memcache.client import CloudMemcacheClient +from google.cloud.memcache_v1.types.cloud_memcache import ( + ApplyParametersRequest, + CreateInstanceRequest, + DeleteInstanceRequest, + GetInstanceRequest, + Instance, + ListInstancesRequest, + ListInstancesResponse, + MemcacheParameters, + MemcacheVersion, + OperationMetadata, + UpdateInstanceRequest, + UpdateParametersRequest, +) __all__ = ( "CloudMemcacheClient", diff --git a/google/cloud/memcache_v1/__init__.py b/google/cloud/memcache_v1/__init__.py index 02277b0..a4dbffd 100644 --- a/google/cloud/memcache_v1/__init__.py +++ b/google/cloud/memcache_v1/__init__.py @@ -14,21 +14,21 @@ # limitations under the License. 
# -from .services.cloud_memcache import CloudMemcacheClient -from .services.cloud_memcache import CloudMemcacheAsyncClient - -from .types.cloud_memcache import ApplyParametersRequest -from .types.cloud_memcache import CreateInstanceRequest -from .types.cloud_memcache import DeleteInstanceRequest -from .types.cloud_memcache import GetInstanceRequest -from .types.cloud_memcache import Instance -from .types.cloud_memcache import ListInstancesRequest -from .types.cloud_memcache import ListInstancesResponse -from .types.cloud_memcache import MemcacheParameters -from .types.cloud_memcache import OperationMetadata -from .types.cloud_memcache import UpdateInstanceRequest -from .types.cloud_memcache import UpdateParametersRequest -from .types.cloud_memcache import MemcacheVersion +from .services.cloud_memcache import CloudMemcacheAsyncClient, CloudMemcacheClient +from .types.cloud_memcache import ( + ApplyParametersRequest, + CreateInstanceRequest, + DeleteInstanceRequest, + GetInstanceRequest, + Instance, + ListInstancesRequest, + ListInstancesResponse, + MemcacheParameters, + MemcacheVersion, + OperationMetadata, + UpdateInstanceRequest, + UpdateParametersRequest, +) __all__ = ( "CloudMemcacheAsyncClient", diff --git a/google/cloud/memcache_v1/services/cloud_memcache/__init__.py b/google/cloud/memcache_v1/services/cloud_memcache/__init__.py index 64e72f6..61c41a1 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/__init__.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/__init__.py @@ -13,8 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .client import CloudMemcacheClient from .async_client import CloudMemcacheAsyncClient +from .client import CloudMemcacheClient __all__ = ( "CloudMemcacheClient", diff --git a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py index 501b5f6..43733a5 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py @@ -17,14 +17,14 @@ import functools import re from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources -from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import pkg_resources try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] @@ -33,14 +33,16 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.memcache_v1.services.cloud_memcache import pagers -from google.cloud.memcache_v1.types import cloud_memcache from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport + +from google.cloud.memcache_v1.services.cloud_memcache import pagers +from google.cloud.memcache_v1.types import cloud_memcache + from .client import CloudMemcacheClient +from .transports.base import DEFAULT_CLIENT_INFO, CloudMemcacheTransport +from .transports.grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport 
class CloudMemcacheAsyncClient: diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py index 53fc028..f1f1153 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -17,17 +17,17 @@ import os import re from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +import pkg_resources try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] @@ -36,12 +36,14 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.memcache_v1.services.cloud_memcache import pagers -from google.cloud.memcache_v1.types import cloud_memcache from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO + +from google.cloud.memcache_v1.services.cloud_memcache import pagers +from google.cloud.memcache_v1.types import cloud_memcache + +from .transports.base import DEFAULT_CLIENT_INFO, CloudMemcacheTransport from .transports.grpc import CloudMemcacheGrpcTransport from .transports.grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport diff 
--git a/google/cloud/memcache_v1/services/cloud_memcache/pagers.py b/google/cloud/memcache_v1/services/cloud_memcache/pagers.py index 45f1d9a..306970d 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/pagers.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/pagers.py @@ -18,10 +18,10 @@ AsyncIterator, Awaitable, Callable, + Iterator, + Optional, Sequence, Tuple, - Optional, - Iterator, ) from google.cloud.memcache_v1.types import cloud_memcache diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py index 71932bd..ab7c86d 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py @@ -20,7 +20,6 @@ from .grpc import CloudMemcacheGrpcTransport from .grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport - # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[CloudMemcacheTransport]] _transport_registry["grpc"] = CloudMemcacheGrpcTransport diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py index 1272f2d..2937a77 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py @@ -15,19 +15,18 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources -import google.auth # type: ignore import google.api_core from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, operations_v1 from google.api_core import retry as retries -from google.api_core import operations_v1 +import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning 
import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore +import pkg_resources from google.cloud.memcache_v1.types import cloud_memcache -from google.longrunning import operations_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py index c5d8f32..861f134 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py @@ -13,21 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, grpc_helpers, operations_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore - +from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore from google.cloud.memcache_v1.types import cloud_memcache -from google.longrunning import operations_pb2 # type: ignore -from .base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO + +from .base import DEFAULT_CLIENT_INFO, CloudMemcacheTransport class CloudMemcacheGrpcTransport(CloudMemcacheTransport): diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py index 26de320..6d6f578 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py @@ -13,21 +13,19 @@ # See 
the License for the specific language governing permissions and # limitations under the License. # -import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import operations_v1 +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore - +from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.memcache_v1.types import cloud_memcache -from google.longrunning import operations_pb2 # type: ignore -from .base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO + +from .base import DEFAULT_CLIENT_INFO, CloudMemcacheTransport from .grpc import CloudMemcacheGrpcTransport diff --git a/google/cloud/memcache_v1/types/__init__.py b/google/cloud/memcache_v1/types/__init__.py index 7c57b55..adc89ef 100644 --- a/google/cloud/memcache_v1/types/__init__.py +++ b/google/cloud/memcache_v1/types/__init__.py @@ -22,10 +22,10 @@ ListInstancesRequest, ListInstancesResponse, MemcacheParameters, + MemcacheVersion, OperationMetadata, UpdateInstanceRequest, UpdateParametersRequest, - MemcacheVersion, ) __all__ = ( diff --git a/google/cloud/memcache_v1/types/cloud_memcache.py b/google/cloud/memcache_v1/types/cloud_memcache.py index bfc1f63..1077426 100644 --- a/google/cloud/memcache_v1/types/cloud_memcache.py +++ b/google/cloud/memcache_v1/types/cloud_memcache.py @@ -13,11 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import proto # type: ignore - from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore - +import proto # type: ignore __protobuf__ = proto.module( package="google.cloud.memcache.v1", diff --git a/google/cloud/memcache_v1beta2/__init__.py b/google/cloud/memcache_v1beta2/__init__.py index 9ad9112..0efe11e 100644 --- a/google/cloud/memcache_v1beta2/__init__.py +++ b/google/cloud/memcache_v1beta2/__init__.py @@ -14,24 +14,24 @@ # limitations under the License. # -from .services.cloud_memcache import CloudMemcacheClient -from .services.cloud_memcache import CloudMemcacheAsyncClient - -from .types.cloud_memcache import ApplyParametersRequest -from .types.cloud_memcache import ApplySoftwareUpdateRequest -from .types.cloud_memcache import CreateInstanceRequest -from .types.cloud_memcache import DeleteInstanceRequest -from .types.cloud_memcache import GetInstanceRequest -from .types.cloud_memcache import Instance -from .types.cloud_memcache import ListInstancesRequest -from .types.cloud_memcache import ListInstancesResponse -from .types.cloud_memcache import LocationMetadata -from .types.cloud_memcache import MemcacheParameters -from .types.cloud_memcache import OperationMetadata -from .types.cloud_memcache import UpdateInstanceRequest -from .types.cloud_memcache import UpdateParametersRequest -from .types.cloud_memcache import ZoneMetadata -from .types.cloud_memcache import MemcacheVersion +from .services.cloud_memcache import CloudMemcacheAsyncClient, CloudMemcacheClient +from .types.cloud_memcache import ( + ApplyParametersRequest, + ApplySoftwareUpdateRequest, + CreateInstanceRequest, + DeleteInstanceRequest, + GetInstanceRequest, + Instance, + ListInstancesRequest, + ListInstancesResponse, + LocationMetadata, + MemcacheParameters, + MemcacheVersion, + OperationMetadata, + UpdateInstanceRequest, + UpdateParametersRequest, + ZoneMetadata, +) __all__ = ( "CloudMemcacheAsyncClient", diff --git 
a/google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py index 64e72f6..61c41a1 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py @@ -13,8 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from .client import CloudMemcacheClient from .async_client import CloudMemcacheAsyncClient +from .client import CloudMemcacheClient __all__ = ( "CloudMemcacheClient", diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py index 912b38c..dd63fb3 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py @@ -17,14 +17,14 @@ import functools import re from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources -from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import pkg_resources try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] @@ -33,14 +33,16 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.memcache_v1beta2.services.cloud_memcache import pagers -from google.cloud.memcache_v1beta2.types import cloud_memcache from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore 
-from .transports.base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport + +from google.cloud.memcache_v1beta2.services.cloud_memcache import pagers +from google.cloud.memcache_v1beta2.types import cloud_memcache + from .client import CloudMemcacheClient +from .transports.base import DEFAULT_CLIENT_INFO, CloudMemcacheTransport +from .transports.grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport class CloudMemcacheAsyncClient: diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index 396803f..a9ffe7e 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -17,17 +17,17 @@ import os import re from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +import pkg_resources try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] @@ -36,12 +36,14 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.memcache_v1beta2.services.cloud_memcache import pagers -from google.cloud.memcache_v1beta2.types import cloud_memcache from google.protobuf import empty_pb2 # type: ignore from google.protobuf 
import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO + +from google.cloud.memcache_v1beta2.services.cloud_memcache import pagers +from google.cloud.memcache_v1beta2.types import cloud_memcache + +from .transports.base import DEFAULT_CLIENT_INFO, CloudMemcacheTransport from .transports.grpc import CloudMemcacheGrpcTransport from .transports.grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py index 30c5652..3bb67cb 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py @@ -18,10 +18,10 @@ AsyncIterator, Awaitable, Callable, + Iterator, + Optional, Sequence, Tuple, - Optional, - Iterator, ) from google.cloud.memcache_v1beta2.types import cloud_memcache diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py index 71932bd..ab7c86d 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py @@ -20,7 +20,6 @@ from .grpc import CloudMemcacheGrpcTransport from .grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport - # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[CloudMemcacheTransport]] _transport_registry["grpc"] = CloudMemcacheGrpcTransport diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py index 0eca00e..e19d086 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py @@ -15,19 +15,18 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources -import google.auth # type: ignore import google.api_core from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, operations_v1 from google.api_core import retry as retries -from google.api_core import operations_v1 +import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore +import pkg_resources from google.cloud.memcache_v1beta2.types import cloud_memcache -from google.longrunning import operations_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py index 8ac71ba..b5fffbc 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py @@ -13,21 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, grpc_helpers, operations_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore - +from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore from google.cloud.memcache_v1beta2.types import cloud_memcache -from google.longrunning import operations_pb2 # type: ignore -from .base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO + +from .base import DEFAULT_CLIENT_INFO, CloudMemcacheTransport class CloudMemcacheGrpcTransport(CloudMemcacheTransport): diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py index f5a6a6b..7c00609 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py @@ -13,21 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import operations_v1 +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore - +from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.memcache_v1beta2.types import cloud_memcache -from google.longrunning import operations_pb2 # type: ignore -from .base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO + +from .base import DEFAULT_CLIENT_INFO, CloudMemcacheTransport from .grpc import CloudMemcacheGrpcTransport diff --git a/google/cloud/memcache_v1beta2/types/__init__.py b/google/cloud/memcache_v1beta2/types/__init__.py index 005a5bd..073427d 100644 --- a/google/cloud/memcache_v1beta2/types/__init__.py +++ b/google/cloud/memcache_v1beta2/types/__init__.py @@ -24,11 +24,11 @@ ListInstancesResponse, LocationMetadata, MemcacheParameters, + MemcacheVersion, OperationMetadata, UpdateInstanceRequest, UpdateParametersRequest, ZoneMetadata, - MemcacheVersion, ) __all__ = ( diff --git a/google/cloud/memcache_v1beta2/types/cloud_memcache.py b/google/cloud/memcache_v1beta2/types/cloud_memcache.py index affb84e..b4b5c8c 100644 --- a/google/cloud/memcache_v1beta2/types/cloud_memcache.py +++ b/google/cloud/memcache_v1beta2/types/cloud_memcache.py @@ -13,11 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import proto # type: ignore - from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore - +import proto # type: ignore __protobuf__ = proto.module( package="google.cloud.memcache.v1beta2", diff --git a/noxfile.py b/noxfile.py index 6ee5e8a..7c1742d 100644 --- a/noxfile.py +++ b/noxfile.py @@ -17,6 +17,7 @@ # Generated by synthtool. DO NOT EDIT! from __future__ import absolute_import + import os import pathlib import shutil @@ -25,7 +26,8 @@ import nox BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" @@ -83,7 +85,7 @@ def lint(session): session.run( "black", "--check", - *BLACK_PATHS, + *LINT_PATHS, ) session.run("flake8", "google", "tests") @@ -94,7 +96,27 @@ def blacken(session): session.install(BLACK_VERSION) session.run( "black", - *BLACK_PATHS, + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. 
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, ) diff --git a/setup.py b/setup.py index bdf4c8f..68101f7 100644 --- a/setup.py +++ b/setup.py @@ -17,6 +17,7 @@ import io import os + import setuptools # type: ignore version = "1.3.1" diff --git a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py index f41ad7e..1283dc7 100644 --- a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -13,38 +13,41 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import os -import mock - -import grpc -from grpc.experimental import aio import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule - +import os +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) from google.api_core import client_options from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template +import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.cloud.memcache_v1.services.cloud_memcache import CloudMemcacheAsyncClient -from google.cloud.memcache_v1.services.cloud_memcache import CloudMemcacheClient -from google.cloud.memcache_v1.services.cloud_memcache import pagers -from google.cloud.memcache_v1.services.cloud_memcache import transports -from 
google.cloud.memcache_v1.types import cloud_memcache from google.longrunning import operations_pb2 from google.oauth2 import service_account from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore -import google.auth +import grpc +from grpc.experimental import aio +import mock +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest + +from google.cloud.memcache_v1.services.cloud_memcache import ( + CloudMemcacheAsyncClient, + CloudMemcacheClient, + pagers, + transports, +) +from google.cloud.memcache_v1.types import cloud_memcache def client_cert_source_callback(): diff --git a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index d02246e..af9022e 100644 --- a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -13,40 +13,41 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import os -import mock - -import grpc -from grpc.experimental import aio import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule - +import os +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) from google.api_core import client_options from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template +import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.cloud.memcache_v1beta2.services.cloud_memcache import ( - CloudMemcacheAsyncClient, -) -from google.cloud.memcache_v1beta2.services.cloud_memcache import CloudMemcacheClient -from google.cloud.memcache_v1beta2.services.cloud_memcache import pagers -from google.cloud.memcache_v1beta2.services.cloud_memcache import transports -from google.cloud.memcache_v1beta2.types import cloud_memcache from google.longrunning import operations_pb2 from google.oauth2 import service_account from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore -import google.auth +import grpc +from grpc.experimental import aio +import mock +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest + +from google.cloud.memcache_v1beta2.services.cloud_memcache import ( + CloudMemcacheAsyncClient, + CloudMemcacheClient, + pagers, + transports, +) +from google.cloud.memcache_v1beta2.types import cloud_memcache def client_cert_source_callback(): From 298872948a2f257575100daa0a714fac84523e0a Mon Sep 17 00:00:00 2001 From: 
"gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 21 Apr 2022 16:18:25 +0000 Subject: [PATCH 099/159] chore(python): use ubuntu 22.04 in docs image (#170) Source-Link: https://github.com/googleapis/synthtool/commit/f15cc72fb401b4861cedebb10af74afe428fb1f8 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:bc5eed3804aec2f05fad42aacf973821d9500c174015341f721a984a0825b6fd --- .github/.OwlBot.lock.yaml | 4 ++-- .kokoro/docker/docs/Dockerfile | 20 ++++++++++++++++++-- 2 files changed, 20 insertions(+), 4 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 7c454ab..64f82d6 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:00c9d764fd1cd56265f12a5ef4b99a0c9e87cf261018099141e2ca5158890416 -# created: 2022-04-20T23:42:53.970438194Z + digest: sha256:bc5eed3804aec2f05fad42aacf973821d9500c174015341f721a984a0825b6fd +# created: 2022-04-21T15:43:16.246106921Z diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile index 4e1b1fb..238b87b 100644 --- a/.kokoro/docker/docs/Dockerfile +++ b/.kokoro/docker/docs/Dockerfile @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from ubuntu:20.04 +from ubuntu:22.04 ENV DEBIAN_FRONTEND noninteractive @@ -60,8 +60,24 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb +###################### Install python 3.8.11 + +# Download python 3.8.11 +RUN wget https://www.python.org/ftp/python/3.8.11/Python-3.8.11.tgz + +# Extract files +RUN tar -xvf Python-3.8.11.tgz + +# Install python 3.8.11 +RUN ./Python-3.8.11/configure --enable-optimizations +RUN make altinstall + +###################### Install pip RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3.8 /tmp/get-pip.py \ + && python3 /tmp/get-pip.py \ && rm /tmp/get-pip.py +# Test pip +RUN python3 -m pip + CMD ["python3.8"] From e934a67518b8932e216f4c8272f368d13976d3fd Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 28 Apr 2022 08:06:02 -0400 Subject: [PATCH 100/159] chore: use gapic-generator-python 0.65.2 (#171) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: use gapic-generator-python 0.65.2 PiperOrigin-RevId: 444333013 Source-Link: https://github.com/googleapis/googleapis/commit/f91b6cf82e929280f6562f6110957c654bd9e2e6 Source-Link: https://github.com/googleapis/googleapis-gen/commit/16eb36095c294e712c74a1bf23550817b42174e5 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTZlYjM2MDk1YzI5NGU3MTJjNzRhMWJmMjM1NTA4MTdiNDIxNzRlNSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../services/cloud_memcache/async_client.py | 42 ++++++------ .../services/cloud_memcache/async_client.py | 48 +++++++------- .../gapic/memcache_v1/test_cloud_memcache.py | 58 ++++++++-------- .../memcache_v1beta2/test_cloud_memcache.py | 66 +++++++++---------- 4 files changed, 107 insertions(+), 107 deletions(-) diff --git 
a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py index 43733a5..3fc0230 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py @@ -243,9 +243,9 @@ async def list_instances( from google.cloud import memcache_v1 - def sample_list_instances(): + async def sample_list_instances(): # Create a client - client = memcache_v1.CloudMemcacheClient() + client = memcache_v1.CloudMemcacheAsyncClient() # Initialize request argument(s) request = memcache_v1.ListInstancesRequest( @@ -256,7 +256,7 @@ def sample_list_instances(): page_result = client.list_instances(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -353,9 +353,9 @@ async def get_instance( from google.cloud import memcache_v1 - def sample_get_instance(): + async def sample_get_instance(): # Create a client - client = memcache_v1.CloudMemcacheClient() + client = memcache_v1.CloudMemcacheAsyncClient() # Initialize request argument(s) request = memcache_v1.GetInstanceRequest( @@ -363,7 +363,7 @@ def sample_get_instance(): ) # Make the request - response = client.get_instance(request=request) + response = await client.get_instance(request=request) # Handle the response print(response) @@ -450,9 +450,9 @@ async def create_instance( from google.cloud import memcache_v1 - def sample_create_instance(): + async def sample_create_instance(): # Create a client - client = memcache_v1.CloudMemcacheClient() + client = memcache_v1.CloudMemcacheAsyncClient() # Initialize request argument(s) instance = memcache_v1.Instance() @@ -472,7 +472,7 @@ def sample_create_instance(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -597,9 +597,9 @@ async def update_instance( from 
google.cloud import memcache_v1 - def sample_update_instance(): + async def sample_update_instance(): # Create a client - client = memcache_v1.CloudMemcacheClient() + client = memcache_v1.CloudMemcacheAsyncClient() # Initialize request argument(s) instance = memcache_v1.Instance() @@ -617,7 +617,7 @@ def sample_update_instance(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -729,9 +729,9 @@ async def update_parameters( from google.cloud import memcache_v1 - def sample_update_parameters(): + async def sample_update_parameters(): # Create a client - client = memcache_v1.CloudMemcacheClient() + client = memcache_v1.CloudMemcacheAsyncClient() # Initialize request argument(s) request = memcache_v1.UpdateParametersRequest( @@ -743,7 +743,7 @@ def sample_update_parameters(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -855,9 +855,9 @@ async def delete_instance( from google.cloud import memcache_v1 - def sample_delete_instance(): + async def sample_delete_instance(): # Create a client - client = memcache_v1.CloudMemcacheClient() + client = memcache_v1.CloudMemcacheAsyncClient() # Initialize request argument(s) request = memcache_v1.DeleteInstanceRequest( @@ -869,7 +869,7 @@ def sample_delete_instance(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -981,9 +981,9 @@ async def apply_parameters( from google.cloud import memcache_v1 - def sample_apply_parameters(): + async def sample_apply_parameters(): # Create a client - client = memcache_v1.CloudMemcacheClient() + client = memcache_v1.CloudMemcacheAsyncClient() # Initialize request argument(s) request = memcache_v1.ApplyParametersRequest( @@ -995,7 +995,7 @@ def sample_apply_parameters(): 
print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py index dd63fb3..da31dc5 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py @@ -243,9 +243,9 @@ async def list_instances( from google.cloud import memcache_v1beta2 - def sample_list_instances(): + async def sample_list_instances(): # Create a client - client = memcache_v1beta2.CloudMemcacheClient() + client = memcache_v1beta2.CloudMemcacheAsyncClient() # Initialize request argument(s) request = memcache_v1beta2.ListInstancesRequest( @@ -256,7 +256,7 @@ def sample_list_instances(): page_result = client.list_instances(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -353,9 +353,9 @@ async def get_instance( from google.cloud import memcache_v1beta2 - def sample_get_instance(): + async def sample_get_instance(): # Create a client - client = memcache_v1beta2.CloudMemcacheClient() + client = memcache_v1beta2.CloudMemcacheAsyncClient() # Initialize request argument(s) request = memcache_v1beta2.GetInstanceRequest( @@ -363,7 +363,7 @@ def sample_get_instance(): ) # Make the request - response = client.get_instance(request=request) + response = await client.get_instance(request=request) # Handle the response print(response) @@ -450,9 +450,9 @@ async def create_instance( from google.cloud import memcache_v1beta2 - def sample_create_instance(): + async def sample_create_instance(): # Create a client - client = memcache_v1beta2.CloudMemcacheClient() + client = memcache_v1beta2.CloudMemcacheAsyncClient() # Initialize request argument(s) resource = memcache_v1beta2.Instance() @@ -472,7 
+472,7 @@ def sample_create_instance(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -598,9 +598,9 @@ async def update_instance( from google.cloud import memcache_v1beta2 - def sample_update_instance(): + async def sample_update_instance(): # Create a client - client = memcache_v1beta2.CloudMemcacheClient() + client = memcache_v1beta2.CloudMemcacheAsyncClient() # Initialize request argument(s) resource = memcache_v1beta2.Instance() @@ -618,7 +618,7 @@ def sample_update_instance(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -731,9 +731,9 @@ async def update_parameters( from google.cloud import memcache_v1beta2 - def sample_update_parameters(): + async def sample_update_parameters(): # Create a client - client = memcache_v1beta2.CloudMemcacheClient() + client = memcache_v1beta2.CloudMemcacheAsyncClient() # Initialize request argument(s) request = memcache_v1beta2.UpdateParametersRequest( @@ -745,7 +745,7 @@ def sample_update_parameters(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -858,9 +858,9 @@ async def delete_instance( from google.cloud import memcache_v1beta2 - def sample_delete_instance(): + async def sample_delete_instance(): # Create a client - client = memcache_v1beta2.CloudMemcacheClient() + client = memcache_v1beta2.CloudMemcacheAsyncClient() # Initialize request argument(s) request = memcache_v1beta2.DeleteInstanceRequest( @@ -872,7 +872,7 @@ def sample_delete_instance(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -984,9 +984,9 @@ async def apply_parameters( from google.cloud import memcache_v1beta2 - def 
sample_apply_parameters(): + async def sample_apply_parameters(): # Create a client - client = memcache_v1beta2.CloudMemcacheClient() + client = memcache_v1beta2.CloudMemcacheAsyncClient() # Initialize request argument(s) request = memcache_v1beta2.ApplyParametersRequest( @@ -998,7 +998,7 @@ def sample_apply_parameters(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -1118,9 +1118,9 @@ async def apply_software_update( from google.cloud import memcache_v1beta2 - def sample_apply_software_update(): + async def sample_apply_software_update(): # Create a client - client = memcache_v1beta2.CloudMemcacheClient() + client = memcache_v1beta2.CloudMemcacheAsyncClient() # Initialize request argument(s) request = memcache_v1beta2.ApplySoftwareUpdateRequest( @@ -1132,7 +1132,7 @@ def sample_apply_software_update(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) diff --git a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py index 1283dc7..fec4e83 100644 --- a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -757,7 +757,7 @@ def test_list_instances_field_headers(): # a field header. Set these to a non-empty value. request = cloud_memcache.ListInstancesRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -773,7 +773,7 @@ def test_list_instances_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -787,7 +787,7 @@ async def test_list_instances_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_memcache.ListInstancesRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -805,7 +805,7 @@ async def test_list_instances_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -936,7 +936,7 @@ def test_list_instances_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, cloud_memcache.Instance) for i in results) @@ -1209,7 +1209,7 @@ def test_get_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_memcache.GetInstanceRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_instance), "__call__") as call: @@ -1225,7 +1225,7 @@ def test_get_instance_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1239,7 +1239,7 @@ async def test_get_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_memcache.GetInstanceRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_instance), "__call__") as call: @@ -1257,7 +1257,7 @@ async def test_get_instance_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1435,7 +1435,7 @@ def test_create_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_memcache.CreateInstanceRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_instance), "__call__") as call: @@ -1451,7 +1451,7 @@ def test_create_instance_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1465,7 +1465,7 @@ async def test_create_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_memcache.CreateInstanceRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_instance), "__call__") as call: @@ -1483,7 +1483,7 @@ async def test_create_instance_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1681,7 +1681,7 @@ def test_update_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateInstanceRequest() - request.instance.name = "instance.name/value" + request.instance.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.update_instance), "__call__") as call: @@ -1697,7 +1697,7 @@ def test_update_instance_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "instance.name=instance.name/value", + "instance.name=name_value", ) in kw["metadata"] @@ -1711,7 +1711,7 @@ async def test_update_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateInstanceRequest() - request.instance.name = "instance.name/value" + request.instance.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_instance), "__call__") as call: @@ -1729,7 +1729,7 @@ async def test_update_instance_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "instance.name=instance.name/value", + "instance.name=name_value", ) in kw["metadata"] @@ -1923,7 +1923,7 @@ def test_update_parameters_field_headers(): # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateParametersRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1941,7 +1941,7 @@ def test_update_parameters_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1955,7 +1955,7 @@ async def test_update_parameters_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateParametersRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1975,7 +1975,7 @@ async def test_update_parameters_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -2177,7 +2177,7 @@ def test_delete_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_memcache.DeleteInstanceRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: @@ -2193,7 +2193,7 @@ def test_delete_instance_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -2207,7 +2207,7 @@ async def test_delete_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_memcache.DeleteInstanceRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: @@ -2225,7 +2225,7 @@ async def test_delete_instance_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -2403,7 +2403,7 @@ def test_apply_parameters_field_headers(): # a field header. Set these to a non-empty value. request = cloud_memcache.ApplyParametersRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: @@ -2419,7 +2419,7 @@ def test_apply_parameters_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -2433,7 +2433,7 @@ async def test_apply_parameters_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_memcache.ApplyParametersRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: @@ -2451,7 +2451,7 @@ async def test_apply_parameters_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] diff --git a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index af9022e..df5e7e4 100644 --- a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -757,7 +757,7 @@ def test_list_instances_field_headers(): # a field header. Set these to a non-empty value. request = cloud_memcache.ListInstancesRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -773,7 +773,7 @@ def test_list_instances_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -787,7 +787,7 @@ async def test_list_instances_field_headers_async(): # a field header. Set these to a non-empty value. 
request = cloud_memcache.ListInstancesRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -805,7 +805,7 @@ async def test_list_instances_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -936,7 +936,7 @@ def test_list_instances_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, cloud_memcache.Instance) for i in results) @@ -1213,7 +1213,7 @@ def test_get_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_memcache.GetInstanceRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_instance), "__call__") as call: @@ -1229,7 +1229,7 @@ def test_get_instance_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1243,7 +1243,7 @@ async def test_get_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_memcache.GetInstanceRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_instance), "__call__") as call: @@ -1261,7 +1261,7 @@ async def test_get_instance_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1439,7 +1439,7 @@ def test_create_instance_field_headers(): # a field header. Set these to a non-empty value. 
request = cloud_memcache.CreateInstanceRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_instance), "__call__") as call: @@ -1455,7 +1455,7 @@ def test_create_instance_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1469,7 +1469,7 @@ async def test_create_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_memcache.CreateInstanceRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_instance), "__call__") as call: @@ -1487,7 +1487,7 @@ async def test_create_instance_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1685,7 +1685,7 @@ def test_update_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateInstanceRequest() - request.resource.name = "resource.name/value" + request.resource.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_instance), "__call__") as call: @@ -1701,7 +1701,7 @@ def test_update_instance_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource.name=resource.name/value", + "resource.name=name_value", ) in kw["metadata"] @@ -1715,7 +1715,7 @@ async def test_update_instance_field_headers_async(): # a field header. Set these to a non-empty value. 
request = cloud_memcache.UpdateInstanceRequest() - request.resource.name = "resource.name/value" + request.resource.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_instance), "__call__") as call: @@ -1733,7 +1733,7 @@ async def test_update_instance_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource.name=resource.name/value", + "resource.name=name_value", ) in kw["metadata"] @@ -1927,7 +1927,7 @@ def test_update_parameters_field_headers(): # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateParametersRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1945,7 +1945,7 @@ def test_update_parameters_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1959,7 +1959,7 @@ async def test_update_parameters_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_memcache.UpdateParametersRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1979,7 +1979,7 @@ async def test_update_parameters_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -2181,7 +2181,7 @@ def test_delete_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_memcache.DeleteInstanceRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: @@ -2197,7 +2197,7 @@ def test_delete_instance_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -2211,7 +2211,7 @@ async def test_delete_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_memcache.DeleteInstanceRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: @@ -2229,7 +2229,7 @@ async def test_delete_instance_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -2407,7 +2407,7 @@ def test_apply_parameters_field_headers(): # a field header. Set these to a non-empty value. request = cloud_memcache.ApplyParametersRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: @@ -2423,7 +2423,7 @@ def test_apply_parameters_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -2437,7 +2437,7 @@ async def test_apply_parameters_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_memcache.ApplyParametersRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: @@ -2455,7 +2455,7 @@ async def test_apply_parameters_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -2660,7 +2660,7 @@ def test_apply_software_update_field_headers(): # a field header. Set these to a non-empty value. request = cloud_memcache.ApplySoftwareUpdateRequest() - request.instance = "instance/value" + request.instance = "instance_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2678,7 +2678,7 @@ def test_apply_software_update_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "instance=instance/value", + "instance=instance_value", ) in kw["metadata"] @@ -2692,7 +2692,7 @@ async def test_apply_software_update_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_memcache.ApplySoftwareUpdateRequest() - request.instance = "instance/value" + request.instance = "instance_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2712,7 +2712,7 @@ async def test_apply_software_update_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "instance=instance/value", + "instance=instance_value", ) in kw["metadata"] From 47fd30e07199a74d496991a6427c663f58fcf20c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 5 May 2022 12:19:53 -0400 Subject: [PATCH 101/159] chore: [autoapprove] update readme_gen.py to include autoescape True (#172) Source-Link: https://github.com/googleapis/synthtool/commit/6b4d5a6407d740beb4158b302194a62a4108a8a6 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f792ee1320e03eda2d13a5281a2989f7ed8a9e50b73ef6da97fac7e1e850b149 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- scripts/readme-gen/readme_gen.py | 5 ++++- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 64f82d6..b631901 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:bc5eed3804aec2f05fad42aacf973821d9500c174015341f721a984a0825b6fd -# created: 2022-04-21T15:43:16.246106921Z + digest: sha256:f792ee1320e03eda2d13a5281a2989f7ed8a9e50b73ef6da97fac7e1e850b149 +# created: 2022-05-05T15:17:27.599381182Z diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py index d309d6e..91b5967 100644 --- a/scripts/readme-gen/readme_gen.py +++ b/scripts/readme-gen/readme_gen.py @@ -28,7 +28,10 @@ jinja_env = jinja2.Environment( trim_blocks=True, loader=jinja2.FileSystemLoader( - os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates')))) + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), + autoescape=True, +) README_TMPL = jinja_env.get_template('README.tmpl.rst') From d6f5bfc806860954e335d4ae57f0f3fa09839026 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 5 May 2022 23:18:24 +0000 Subject: [PATCH 102/159] chore(python): auto approve template changes (#174) Source-Link: https://github.com/googleapis/synthtool/commit/453a5d9c9a55d1969240a37d36cec626d20a9024 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:81ed5ecdfc7cac5b699ba4537376f3563f6f04122c4ec9e735d3b3dc1d43dd32 --- .github/.OwlBot.lock.yaml | 4 ++-- .github/auto-approve.yml | 3 +++ 2 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 .github/auto-approve.yml diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index b631901..757c9dc 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f792ee1320e03eda2d13a5281a2989f7ed8a9e50b73ef6da97fac7e1e850b149 -# created: 2022-05-05T15:17:27.599381182Z + digest: sha256:81ed5ecdfc7cac5b699ba4537376f3563f6f04122c4ec9e735d3b3dc1d43dd32 +# created: 2022-05-05T22:08:23.383410683Z diff --git a/.github/auto-approve.yml b/.github/auto-approve.yml new file mode 100644 index 0000000..311ebbb --- /dev/null +++ b/.github/auto-approve.yml @@ -0,0 +1,3 @@ +# https://github.com/googleapis/repo-automation-bots/tree/main/packages/auto-approve +processes: + - "OwlBotTemplateChanges" From 8b7bfab83997b0a2fbe19f2dac28c1ab51f1ef94 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 30 May 2022 17:28:18 +0000 Subject: [PATCH 103/159] chore: use gapic-generator-python 1.0.0 (#182) - [ ] Regenerate this pull request now. PiperOrigin-RevId: 451250442 Source-Link: https://github.com/googleapis/googleapis/commit/cca5e8181f6442b134e8d4d206fbe9e0e74684ba Source-Link: https://github.com/googleapis/googleapis-gen/commit/0b219da161a8bdcc3c6f7b2efcd82105182a30ca Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMGIyMTlkYTE2MWE4YmRjYzNjNmY3YjJlZmNkODIxMDUxODJhMzBjYSJ9 --- tests/unit/gapic/memcache_v1/test_cloud_memcache.py | 11 +++++++++-- .../gapic/memcache_v1beta2/test_cloud_memcache.py | 11 +++++++++-- 2 files changed, 18 insertions(+), 4 deletions(-) diff --git a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py index fec4e83..aae4115 100644 --- a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -13,9 +13,17 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import math import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock + +import math + from google.api_core import ( future, gapic_v1, @@ -37,7 +45,6 @@ from google.protobuf import timestamp_pb2 # type: ignore import grpc from grpc.experimental import aio -import mock from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest diff --git a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index df5e7e4..5867efe 100644 --- a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -13,9 +13,17 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import math import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock + +import math + from google.api_core import ( future, gapic_v1, @@ -37,7 +45,6 @@ from google.protobuf import timestamp_pb2 # type: ignore import grpc from grpc.experimental import aio -import mock from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest From acc06a7c8564d272617a66456ac2a002b463443f Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 1 Jun 2022 14:04:39 -0400 Subject: [PATCH 104/159] fix(deps): require protobuf <4.0.0dev (#184) --- setup.py | 3 ++- testing/constraints-3.6.txt | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 68101f7..77332f2 100644 --- a/setup.py +++ b/setup.py @@ -45,7 +45,8 @@ # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 "google-api-core[grpc] >= 1.31.5, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0", - "proto-plus >= 1.15.0", + "proto-plus >= 1.15.0, <2.0.0dev", + 
"protobuf >= 3.19.0, <4.0.0dev", ), python_requires=">=3.6", classifiers=[ diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index be5a64f..786e637 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -6,3 +6,4 @@ # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.31.5 proto-plus==1.15.0 +protobuf==3.19.0 From 3647e5f70d1d43e388d25f11fee9d730c453732d Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Thu, 2 Jun 2022 20:54:24 -0400 Subject: [PATCH 105/159] docs: fix changelog header to consistent size (#183) Co-authored-by: Anthonios Partheniou --- CHANGELOG.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 21c658b..a76aec2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # Changelog -### [1.3.1](https://github.com/googleapis/python-memcache/compare/v1.3.0...v1.3.1) (2022-03-05) +## [1.3.1](https://github.com/googleapis/python-memcache/compare/v1.3.0...v1.3.1) (2022-03-05) ### Bug Fixes @@ -20,7 +20,7 @@ * resolve DuplicateCredentialArgs error when using credentials_file ([5f8a2b4](https://github.com/googleapis/python-memcache/commit/5f8a2b4fe5fcc0c4a2be6b9f8529f4ceacbf6421)) -### [1.2.1](https://www.github.com/googleapis/python-memcache/compare/v1.2.0...v1.2.1) (2021-11-01) +## [1.2.1](https://www.github.com/googleapis/python-memcache/compare/v1.2.0...v1.2.1) (2021-11-01) ### Bug Fixes @@ -40,21 +40,21 @@ * add context manager support in client ([#111](https://www.github.com/googleapis/python-memcache/issues/111)) ([a385b99](https://www.github.com/googleapis/python-memcache/commit/a385b993b2473a01256042cc2c560f872c6b8c13)) -### [1.1.3](https://www.github.com/googleapis/python-memcache/compare/v1.1.2...v1.1.3) (2021-09-30) +## [1.1.3](https://www.github.com/googleapis/python-memcache/compare/v1.1.2...v1.1.3) (2021-09-30) ### Bug Fixes * improper types in pagers generation 
([3680bac](https://www.github.com/googleapis/python-memcache/commit/3680bac8c702cc0313b06dbec3c0c6512ac4a58a)) -### [1.1.2](https://www.github.com/googleapis/python-memcache/compare/v1.1.1...v1.1.2) (2021-09-24) +## [1.1.2](https://www.github.com/googleapis/python-memcache/compare/v1.1.1...v1.1.2) (2021-09-24) ### Bug Fixes * add 'dict' annotation type to 'request' ([c56fbee](https://www.github.com/googleapis/python-memcache/commit/c56fbee0ffedac37a80bca5ca3028c53753ada5a)) -### [1.1.1](https://www.github.com/googleapis/python-memcache/compare/v1.1.0...v1.1.1) (2021-07-26) +## [1.1.1](https://www.github.com/googleapis/python-memcache/compare/v1.1.0...v1.1.1) (2021-07-26) ### Bug Fixes From 844a6643d71c5ad38957d656a65066ee287c810f Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 6 Jun 2022 17:35:15 -0400 Subject: [PATCH 106/159] chore: test minimum dependencies in python 3.7 (#187) --- testing/constraints-3.7.txt | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index da93009..786e637 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -1,2 +1,9 @@ -# This constraints file is left inentionally empty -# so the latest version of dependencies is installed \ No newline at end of file +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. 
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.31.5 +proto-plus==1.15.0 +protobuf==3.19.0 From 88701ac0909f0e544bc71c90131df1e59d09ae65 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 7 Jun 2022 07:59:28 -0400 Subject: [PATCH 107/159] chore(main): release 1.3.2 (#185) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 12 ++++++++++++ setup.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a76aec2..9090812 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,17 @@ # Changelog +## [1.3.2](https://github.com/googleapis/python-memcache/compare/v1.3.1...v1.3.2) (2022-06-06) + + +### Bug Fixes + +* **deps:** require protobuf <4.0.0dev ([#184](https://github.com/googleapis/python-memcache/issues/184)) ([acc06a7](https://github.com/googleapis/python-memcache/commit/acc06a7c8564d272617a66456ac2a002b463443f)) + + +### Documentation + +* fix changelog header to consistent size ([#183](https://github.com/googleapis/python-memcache/issues/183)) ([3647e5f](https://github.com/googleapis/python-memcache/commit/3647e5f70d1d43e388d25f11fee9d730c453732d)) + ## [1.3.1](https://github.com/googleapis/python-memcache/compare/v1.3.0...v1.3.1) (2022-03-05) diff --git a/setup.py b/setup.py index 77332f2..c2847cc 100644 --- a/setup.py +++ b/setup.py @@ -20,7 +20,7 @@ import setuptools # type: ignore -version = "1.3.1" +version = "1.3.2" package_root = os.path.abspath(os.path.dirname(__file__)) From 125d004daf48a0e36374118bf9ad815cbbb1cfbd Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 12 Jun 2022 11:02:43 -0400 Subject: [PATCH 108/159] chore: add prerelease nox session (#192) Source-Link: 
https://github.com/googleapis/synthtool/commit/050953d60f71b4ed4be563e032f03c192c50332f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:65e656411895bff71cffcae97246966460160028f253c2e45b7a25d805a5b142 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 +- .kokoro/continuous/prerelease-deps.cfg | 7 +++ .kokoro/presubmit/prerelease-deps.cfg | 7 +++ noxfile.py | 64 ++++++++++++++++++++++++++ 4 files changed, 80 insertions(+), 2 deletions(-) create mode 100644 .kokoro/continuous/prerelease-deps.cfg create mode 100644 .kokoro/presubmit/prerelease-deps.cfg diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 757c9dc..2185b59 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:81ed5ecdfc7cac5b699ba4537376f3563f6f04122c4ec9e735d3b3dc1d43dd32 -# created: 2022-05-05T22:08:23.383410683Z + digest: sha256:65e656411895bff71cffcae97246966460160028f253c2e45b7a25d805a5b142 +# created: 2022-06-12T13:11:45.905884945Z diff --git a/.kokoro/continuous/prerelease-deps.cfg b/.kokoro/continuous/prerelease-deps.cfg new file mode 100644 index 0000000..3595fb4 --- /dev/null +++ b/.kokoro/continuous/prerelease-deps.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Only run this nox session. +env_vars: { + key: "NOX_SESSION" + value: "prerelease_deps" +} diff --git a/.kokoro/presubmit/prerelease-deps.cfg b/.kokoro/presubmit/prerelease-deps.cfg new file mode 100644 index 0000000..3595fb4 --- /dev/null +++ b/.kokoro/presubmit/prerelease-deps.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Only run this nox session. 
+env_vars: { + key: "NOX_SESSION" + value: "prerelease_deps" +} diff --git a/noxfile.py b/noxfile.py index 7c1742d..b99ccd3 100644 --- a/noxfile.py +++ b/noxfile.py @@ -325,3 +325,67 @@ def docfx(session): os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + prerel_deps = [ + "protobuf", + "googleapis-common-protos", + "google-auth", + "grpcio", + "grpcio-status", + "google-api-core", + "proto-plus", + # dependencies of google-auth + "cryptography", + "pyasn1", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = ["requests"] + session.install(*other_deps) + + session.install(*UNIT_TEST_STANDARD_DEPENDENCIES) + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Don't overwrite prerelease packages. + deps = [dep for dep in deps if dep not in prerel_deps] + # We use --no-deps to ensure that pre-release versions aren't overwritten + # by the version ranges in setup.py. 
+ session.install(*deps) + session.install("--no-deps", "-e", ".[all]") + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + + session.run("py.test", "tests/unit") + session.run("py.test", "tests/system") + session.run("py.test", "samples/snippets") From 4ff2af029aebeaa9107998c91384d14dc53958f5 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 13 Jun 2022 06:10:58 -0400 Subject: [PATCH 109/159] chore(python): add missing import for prerelease testing (#193) Source-Link: https://github.com/googleapis/synthtool/commit/d2871d98e1e767d4ad49a557ff979236d64361a1 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:b2dc5f80edcf5d4486c39068c9fa11f7f851d9568eea4dcba130f994ea9b5e97 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- noxfile.py | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 2185b59..50b29ff 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:65e656411895bff71cffcae97246966460160028f253c2e45b7a25d805a5b142 -# created: 2022-06-12T13:11:45.905884945Z + digest: sha256:b2dc5f80edcf5d4486c39068c9fa11f7f851d9568eea4dcba130f994ea9b5e97 +# created: 2022-06-12T16:09:31.61859086Z diff --git a/noxfile.py b/noxfile.py index b99ccd3..5bb3c20 100644 --- a/noxfile.py +++ b/noxfile.py @@ -20,6 +20,7 @@ import os import pathlib +import re import shutil import warnings From bfc330ba0db806ae59a1880414fb6404d78c3ea1 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 24 Jun 2022 15:44:11 -0400 Subject: [PATCH 110/159] fix: exclude tests directory in packaging (#195) --- setup.py | 57 +++++++++++++++++++++++++++++++++++++------------------- 1 file changed, 38 insertions(+), 19 deletions(-) diff --git a/setup.py b/setup.py index c2847cc..4c0fc46 100644 --- a/setup.py +++ b/setup.py @@ -18,9 +18,21 @@ import io import os -import setuptools # type: ignore +import setuptools +name = "google-cloud-memcache" +description = "Memorystore for Memcached API client library" version = "1.3.2" +release_status = "Development Status :: 5 - Production/Stable" +dependencies = [ + # NOTE: Maintainers, please do not require google-api-core>=2.x.x + # Until this issue is closed + # https://github.com/googleapis/google-cloud-python/issues/10566 + "google-api-core[grpc] >= 1.31.5, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0", + "proto-plus >= 1.15.0, <2.0.0dev", + "protobuf >= 3.19.0, <4.0.0dev", +] +url = "https://github.com/googleapis/python-memcache" package_root = os.path.abspath(os.path.dirname(__file__)) @@ -28,38 +40,45 @@ with io.open(readme_filename, encoding="utf-8") as readme_file: readme = readme_file.read() +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google"] +if "google.cloud" in packages: + namespaces.append("google.cloud") + + setuptools.setup( 
- name="google-cloud-memcache", + name=name, version=version, + description=description, long_description=readme, author="Google LLC", author_email="googleapis-packages@google.com", license="Apache 2.0", - url="https://github.com/googleapis/python-memcache", - packages=setuptools.PEP420PackageFinder.find(), - namespace_packages=("google", "google.cloud"), - platforms="Posix; MacOS X; Windows", - include_package_data=True, - install_requires=( - # NOTE: Maintainers, please do not require google-api-core>=2.x.x - # Until this issue is closed - # https://github.com/googleapis/google-cloud-python/issues/10566 - "google-api-core[grpc] >= 1.31.5, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0", - "proto-plus >= 1.15.0, <2.0.0dev", - "protobuf >= 3.19.0, <4.0.0dev", - ), - python_requires=">=3.6", + url=url, classifiers=[ - "Development Status :: 5 - Production/Stable", + release_status, "Intended Audience :: Developers", - "Operating System :: OS Independent", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Operating System :: OS Independent", "Topic :: Internet", - "Topic :: Software Development :: Libraries :: Python Modules", ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.6", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, zip_safe=False, ) From a792592877e7ff83d5afe631dcf4d2246d33966c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 6 Jul 2022 12:11:50 -0400 Subject: [PATCH 111/159] fix: require python 3.7+ (#198) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(python): drop python 3.6 
Source-Link: https://github.com/googleapis/synthtool/commit/4f89b13af10d086458f9b379e56a614f9d6dab7b Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:e7bb19d47c13839fe8c147e50e02e8b6cf5da8edd1af8b82208cd6f66cc2829c * add api_description to .repo-metadata.json * require python 3.7+ in setup.py * remove python 3.6 sample configs * 🩉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .github/.OwlBot.lock.yaml | 4 +- .github/workflows/unittest.yml | 2 +- .kokoro/samples/python3.6/common.cfg | 40 --------- .kokoro/samples/python3.6/continuous.cfg | 7 -- .kokoro/samples/python3.6/periodic-head.cfg | 11 --- .kokoro/samples/python3.6/periodic.cfg | 6 -- .kokoro/samples/python3.6/presubmit.cfg | 6 -- .kokoro/test-samples-impl.sh | 4 +- .repo-metadata.json | 3 +- CONTRIBUTING.rst | 6 +- README.rst | 50 +++++++---- noxfile.py | 85 ++++++++++++------- .../templates/install_deps.tmpl.rst | 2 +- setup.py | 3 +- 14 files changed, 100 insertions(+), 129 deletions(-) delete mode 100644 .kokoro/samples/python3.6/common.cfg delete mode 100644 .kokoro/samples/python3.6/continuous.cfg delete mode 100644 .kokoro/samples/python3.6/periodic-head.cfg delete mode 100644 .kokoro/samples/python3.6/periodic.cfg delete mode 100644 .kokoro/samples/python3.6/presubmit.cfg diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 50b29ff..1ce6085 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:b2dc5f80edcf5d4486c39068c9fa11f7f851d9568eea4dcba130f994ea9b5e97 -# created: 2022-06-12T16:09:31.61859086Z + digest: sha256:e7bb19d47c13839fe8c147e50e02e8b6cf5da8edd1af8b82208cd6f66cc2829c +# created: 2022-07-05T18:31:20.838186805Z diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index e5be6ed..5531b01 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ['3.6', '3.7', '3.8', '3.9', '3.10'] + python: ['3.7', '3.8', '3.9', '3.10'] steps: - name: Checkout uses: actions/checkout@v3 diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg deleted file mode 100644 index 8210984..0000000 --- a/.kokoro/samples/python3.6/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.6" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-py36" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-memcache/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-memcache/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.6/continuous.cfg b/.kokoro/samples/python3.6/continuous.cfg deleted file mode 100644 index 7218af1..0000000 --- a/.kokoro/samples/python3.6/continuous.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - diff --git a/.kokoro/samples/python3.6/periodic-head.cfg b/.kokoro/samples/python3.6/periodic-head.cfg deleted file mode 100644 index aa527a5..0000000 --- a/.kokoro/samples/python3.6/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-memcache/.kokoro/test-samples-against-head.sh" -} diff --git a/.kokoro/samples/python3.6/periodic.cfg b/.kokoro/samples/python3.6/periodic.cfg deleted file mode 100644 index 71cd1e5..0000000 --- a/.kokoro/samples/python3.6/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/.kokoro/samples/python3.6/presubmit.cfg b/.kokoro/samples/python3.6/presubmit.cfg deleted file mode 100644 index a1c8d97..0000000 --- a/.kokoro/samples/python3.6/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh index 8a324c9..2c6500c 100755 --- a/.kokoro/test-samples-impl.sh +++ b/.kokoro/test-samples-impl.sh @@ -33,7 +33,7 @@ export PYTHONUNBUFFERED=1 env | grep KOKORO # Install nox -python3.6 -m pip install --upgrade --quiet nox +python3.9 -m pip install --upgrade --quiet nox # Use secrets acessor service 
account to get secrets if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then @@ -76,7 +76,7 @@ for file in samples/**/requirements.txt; do echo "------------------------------------------------------------" # Use nox to execute the tests for the project. - python3.6 -m nox -s "$RUN_TESTS_SESSION" + python3.9 -m nox -s "$RUN_TESTS_SESSION" EXIT=$? # If this is a periodic build, send the test log to the FlakyBot. diff --git a/.repo-metadata.json b/.repo-metadata.json index 56dbda8..a1e9712 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -12,5 +12,6 @@ "api_id": "memcache.googleapis.com", "default_version": "v1", "codeowner_team": "", - "api_shortname": "memcache" + "api_shortname": "memcache", + "api_description": "is a fully-managed in-memory data store service for Memcache." } diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 2718ea6..8d6353b 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.6, 3.7, 3.8, 3.9 and 3.10 on both UNIX and Windows. + 3.7, 3.8, 3.9 and 3.10 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -221,13 +221,11 @@ Supported Python Versions We support: -- `Python 3.6`_ - `Python 3.7`_ - `Python 3.8`_ - `Python 3.9`_ - `Python 3.10`_ -.. _Python 3.6: https://docs.python.org/3.6/ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ .. _Python 3.9: https://docs.python.org/3.9/ @@ -239,7 +237,7 @@ Supported versions can be found in our ``noxfile.py`` `config`_. .. _config: https://github.com/googleapis/python-memcache/blob/main/noxfile.py -We also explicitly decided to support Python 3 beginning with version 3.6. +We also explicitly decided to support Python 3 beginning with version 3.7. 
Reasons for this include: - Encouraging use of newest versions of Python 3 diff --git a/README.rst b/README.rst index 7ce5ce2..396d543 100644 --- a/README.rst +++ b/README.rst @@ -1,23 +1,22 @@ -Python Client for Cloud Memorystore for Memcached -================================================= +Python Client for Cloud Memorystore for Memcached API +===================================================== -|GA| |pypi| |versions| +|stable| |pypi| |versions| -`Cloud Memorystore for Memached API`_: Ship performant and highly available key value store, -compatible with OSS Memcached protocol. +`Cloud Memorystore for Memcached API`_: is a fully-managed in-memory data store service for Memcache. - `Client Library Documentation`_ - `Product Documentation`_ -.. |GA| image:: https://img.shields.io/badge/support-ga-gold.svg - :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#general-availability +.. |stable| image:: https://img.shields.io/badge/support-stable-gold.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-memcache.svg :target: https://pypi.org/project/google-cloud-memcache/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-memcache.svg :target: https://pypi.org/project/google-cloud-memcache/ -.. _Cloud Memorystore for Memached API: https://cloud.google.com/memorystore/docs/memcached/ +.. _Cloud Memorystore for Memcached API: cloud.google.com/memorystore/docs/memcached/ .. _Client Library Documentation: https://cloud.google.com/python/docs/reference/memcache/latest -.. _Product Documentation: https://cloud.google.com/memorystore/docs/memcached/ +.. _Product Documentation: cloud.google.com/memorystore/docs/memcached/ Quick Start ----------- @@ -31,7 +30,7 @@ In order to use this library, you first need to go through the following steps: .. 
_Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Enable the Cloud Memorystore for Memcached API.: https://cloud.google.com/memorystore/docs/memcached/ +.. _Enable the Cloud Memorystore for Memcached API.: cloud.google.com/memorystore/docs/memcached/ .. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation @@ -48,6 +47,27 @@ dependencies. .. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/` folder. + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current [active](https://devguide.python.org/devcycle/#in-development-main-branch) and [maintenance](https://devguide.python.org/devcycle/#maintenance-branches) versions of +Python. + +Python >= 3.7 + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an [end-of-life](https://devguide.python.org/devcycle/#end-of-life-branches) +version of Python, we recommend that you update as soon as possible to an actively supported version. + + Mac/Linux ^^^^^^^^^ @@ -72,12 +92,12 @@ Windows Next Steps ~~~~~~~~~~ -- Read the `Client Library Documentation`_ for Cloud Billing API - API to see other available methods on the client. +- Read the `Client Library Documentation`_ for Cloud Memorystore for Memcached API + to see other available methods on the client. - Read the `Cloud Memorystore for Memcached API Product documentation`_ to learn more about the product and see How-to Guides. -- View this `repository’s main README`_ to see the full list of Cloud +- View this `README`_ to see the full list of Cloud APIs that we cover. -.. 
_Cloud Memorystore for Memcached API Product documentation: https://cloud.google.com/memorystore/docs/memcached/ -.. _repository’s main README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst \ No newline at end of file +.. _Cloud Memorystore for Memcached API Product documentation: cloud.google.com/memorystore/docs/memcached/ +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/noxfile.py b/noxfile.py index 5bb3c20..94b2f9c 100644 --- a/noxfile.py +++ b/noxfile.py @@ -32,7 +32,7 @@ DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", @@ -332,28 +332,15 @@ def docfx(session): def prerelease_deps(session): """Run all tests with prerelease versions of dependencies installed.""" - prerel_deps = [ - "protobuf", - "googleapis-common-protos", - "google-auth", - "grpcio", - "grpcio-status", - "google-api-core", - "proto-plus", - # dependencies of google-auth - "cryptography", - "pyasn1", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = ["requests"] - session.install(*other_deps) - + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") session.install(*UNIT_TEST_STANDARD_DEPENDENCIES) - session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) # Because we test minimum dependency versions on the minimum Python # version, the first version we test with in the unit tests sessions has a @@ -367,19 +354,44 @@ def prerelease_deps(session): constraints_text = constraints_file.read() # Ignore leading whitespace and comment lines. 
- deps = [ + constraints_deps = [ match.group(1) for match in re.finditer( r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE ) ] - # Don't overwrite prerelease packages. - deps = [dep for dep in deps if dep not in prerel_deps] - # We use --no-deps to ensure that pre-release versions aren't overwritten - # by the version ranges in setup.py. - session.install(*deps) - session.install("--no-deps", "-e", ".[all]") + session.install(*constraints_deps) + + if os.path.exists("samples/snippets/requirements.txt"): + session.install("-r", "samples/snippets/requirements.txt") + + if os.path.exists("samples/snippets/requirements-test.txt"): + session.install("-r", "samples/snippets/requirements-test.txt") + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + "grpcio", + "grpcio-status", + "google-api-core", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + "google-auth", + ] + session.install(*other_deps) # Print out prerelease package versions session.run( @@ -388,5 +400,16 @@ def prerelease_deps(session): session.run("python", "-c", "import grpc; print(grpc.__version__)") session.run("py.test", "tests/unit") - session.run("py.test", "tests/system") - session.run("py.test", "samples/snippets") + + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Only run system tests if found. + if os.path.exists(system_test_path) or os.path.exists(system_test_folder_path): + session.run("py.test", "tests/system") + + snippets_test_path = os.path.join("samples", "snippets") + + # Only run samples tests if found. 
+ if os.path.exists(snippets_test_path): + session.run("py.test", "samples/snippets") diff --git a/scripts/readme-gen/templates/install_deps.tmpl.rst b/scripts/readme-gen/templates/install_deps.tmpl.rst index 275d649..6f069c6 100644 --- a/scripts/readme-gen/templates/install_deps.tmpl.rst +++ b/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -12,7 +12,7 @@ Install Dependencies .. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup -#. Create a virtualenv. Samples are compatible with Python 3.6+. +#. Create a virtualenv. Samples are compatible with Python 3.7+. .. code-block:: bash diff --git a/setup.py b/setup.py index 4c0fc46..777fa3a 100644 --- a/setup.py +++ b/setup.py @@ -66,7 +66,6 @@ "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", @@ -76,7 +75,7 @@ ], platforms="Posix; MacOS X; Windows", packages=packages, - python_requires=">=3.6", + python_requires=">=3.7", namespace_packages=namespaces, install_requires=dependencies, include_package_data=True, From 9ef3f98e1fb6d73ff9a0f3a9dc9fd74c60ba8c78 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 6 Jul 2022 16:26:11 +0000 Subject: [PATCH 112/159] fix(deps): require google-api-core >= 2.8.0 (#194) - [ ] Regenerate this pull request now. 
PiperOrigin-RevId: 459095142 Source-Link: https://github.com/googleapis/googleapis/commit/4f1be992601ed740a581a32cedc4e7b6c6a27793 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ae686d9cde4fc3e36d0ac02efb8643b15890c1ed Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYWU2ODZkOWNkZTRmYzNlMzZkMGFjMDJlZmI4NjQzYjE1ODkwYzFlZCJ9 feat: add audience parameter PiperOrigin-RevId: 456827138 Source-Link: https://github.com/googleapis/googleapis/commit/23f1a157189581734c7a77cddfeb7c5bc1e440ae Source-Link: https://github.com/googleapis/googleapis-gen/commit/4075a8514f676691ec156688a5bbf183aa9893ce Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDA3NWE4NTE0ZjY3NjY5MWVjMTU2Njg4YTViYmYxODNhYTk4OTNjZSJ9 --- .../services/cloud_memcache/client.py | 1 + .../cloud_memcache/transports/base.py | 16 ++++-- .../cloud_memcache/transports/grpc.py | 2 + .../cloud_memcache/transports/grpc_asyncio.py | 2 + .../services/cloud_memcache/client.py | 1 + .../cloud_memcache/transports/base.py | 16 ++++-- .../cloud_memcache/transports/grpc.py | 2 + .../cloud_memcache/transports/grpc_asyncio.py | 2 + setup.py | 5 +- testing/constraints-3.6.txt | 9 ---- testing/constraints-3.7.txt | 2 +- .../gapic/memcache_v1/test_cloud_memcache.py | 52 +++++++++++++++++++ .../memcache_v1beta2/test_cloud_memcache.py | 52 +++++++++++++++++++ 13 files changed, 138 insertions(+), 24 deletions(-) delete mode 100644 testing/constraints-3.6.txt diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py index f1f1153..af3e085 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -448,6 +448,7 @@ def __init__( quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def list_instances( diff --git 
a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py index 2937a77..e02460b 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py @@ -55,6 +55,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -82,11 +83,6 @@ def __init__( be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. @@ -107,6 +103,11 @@ def __init__( credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. if ( @@ -119,6 +120,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py index 861f134..a135b9a 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py @@ -76,6 +76,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -172,6 +173,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py index 6d6f578..3430e39 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py @@ -121,6 +121,7 @@ def __init__( quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -217,6 +218,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index a9ffe7e..6726ae2 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -448,6 +448,7 @@ def __init__( quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def list_instances( diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py index e19d086..a6e6a4a 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py @@ -55,6 +55,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -82,11 +83,6 @@ def __init__( be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. @@ -107,6 +103,11 @@ def __init__( credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. if ( @@ -119,6 +120,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py index b5fffbc..4be1255 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py @@ -76,6 +76,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -172,6 +173,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py index 7c00609..d0310f8 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py @@ -121,6 +121,7 @@ def __init__( quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -217,6 +218,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/setup.py b/setup.py index 777fa3a..0140762 100644 --- a/setup.py +++ b/setup.py @@ -25,10 +25,7 @@ version = "1.3.2" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - # NOTE: Maintainers, please do not require google-api-core>=2.x.x - # Until this issue is closed - # https://github.com/googleapis/google-cloud-python/issues/10566 - "google-api-core[grpc] >= 1.31.5, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0", + "google-api-core[grpc] >= 2.8.0, <3.0.0dev", "proto-plus >= 1.15.0, <2.0.0dev", "protobuf >= 3.19.0, <4.0.0dev", ] diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt deleted file mode 100644 index 786e637..0000000 --- a/testing/constraints-3.6.txt +++ /dev/null @@ -1,9 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. 
-# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.31.5 -proto-plus==1.15.0 -protobuf==3.19.0 diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index 786e637..6522c7e 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -4,6 +4,6 @@ # Pin the version to the lower bound. # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.31.5 +google-api-core==2.8.0 proto-plus==1.15.0 protobuf==3.19.0 diff --git a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py index aae4115..59e9441 100644 --- a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -235,6 +235,7 @@ def test_cloud_memcache_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -252,6 +253,7 @@ def test_cloud_memcache_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -269,6 +271,7 @@ def test_cloud_memcache_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -298,6 +301,25 @@ def test_cloud_memcache_client_client_options( quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + 
api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", ) @@ -365,6 +387,7 @@ def test_cloud_memcache_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -399,6 +422,7 @@ def test_cloud_memcache_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -421,6 +445,7 @@ def test_cloud_memcache_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -535,6 +560,7 @@ def test_cloud_memcache_client_client_options_scopes( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -573,6 +599,7 @@ def test_cloud_memcache_client_client_options_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -593,6 +620,7 @@ def test_cloud_memcache_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -631,6 +659,7 @@ def test_cloud_memcache_client_create_channel_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. 
@@ -2794,6 +2823,28 @@ def test_cloud_memcache_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudMemcacheGrpcTransport, + transports.CloudMemcacheGrpcAsyncIOTransport, + ], +) +def test_cloud_memcache_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -3294,4 +3345,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) diff --git a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index 5867efe..6d67b31 100644 --- a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -235,6 +235,7 @@ def test_cloud_memcache_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -252,6 +253,7 @@ def test_cloud_memcache_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -269,6 +271,7 @@ def 
test_cloud_memcache_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -298,6 +301,25 @@ def test_cloud_memcache_client_client_options( quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", ) @@ -365,6 +387,7 @@ def test_cloud_memcache_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -399,6 +422,7 @@ def test_cloud_memcache_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -421,6 +445,7 @@ def test_cloud_memcache_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -535,6 +560,7 @@ def test_cloud_memcache_client_client_options_scopes( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -573,6 +599,7 @@ def test_cloud_memcache_client_client_options_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -593,6 +620,7 @@ def test_cloud_memcache_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -631,6 +659,7 @@ def test_cloud_memcache_client_create_channel_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. 
@@ -3060,6 +3089,28 @@ def test_cloud_memcache_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudMemcacheGrpcTransport, + transports.CloudMemcacheGrpcAsyncIOTransport, + ], +) +def test_cloud_memcache_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -3560,4 +3611,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) From 9f0bf8f863c7a6f90ce9ac737e2d71cdd8421b22 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 6 Jul 2022 13:09:10 -0400 Subject: [PATCH 113/159] chore(main): release 1.4.0 (#196) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 14 ++++++++++++++ setup.py | 2 +- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9090812..baa775e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [1.4.0](https://github.com/googleapis/python-memcache/compare/v1.3.2...v1.4.0) (2022-07-06) + + +### Features + +* add audience parameter ([9ef3f98](https://github.com/googleapis/python-memcache/commit/9ef3f98e1fb6d73ff9a0f3a9dc9fd74c60ba8c78)) + + +### Bug Fixes + +* **deps:** require 
google-api-core >= 2.8.0 ([#194](https://github.com/googleapis/python-memcache/issues/194)) ([9ef3f98](https://github.com/googleapis/python-memcache/commit/9ef3f98e1fb6d73ff9a0f3a9dc9fd74c60ba8c78)) +* exclude tests directory in packaging ([#195](https://github.com/googleapis/python-memcache/issues/195)) ([bfc330b](https://github.com/googleapis/python-memcache/commit/bfc330ba0db806ae59a1880414fb6404d78c3ea1)) +* require python 3.7+ ([#198](https://github.com/googleapis/python-memcache/issues/198)) ([a792592](https://github.com/googleapis/python-memcache/commit/a792592877e7ff83d5afe631dcf4d2246d33966c)) + ## [1.3.2](https://github.com/googleapis/python-memcache/compare/v1.3.1...v1.3.2) (2022-06-06) diff --git a/setup.py b/setup.py index 0140762..b11f528 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-memcache" description = "Memorystore for Memcached API client library" -version = "1.3.2" +version = "1.4.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "google-api-core[grpc] >= 2.8.0, <3.0.0dev", From aa7978edd9b6fbe831775622ed3066e39112c2b1 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 13 Jul 2022 12:53:19 -0400 Subject: [PATCH 114/159] fix(deps): require google-api-core>=1.32.0,>=2.8.0 (#199) * fix(deps): require google-api-core>=1.32.0,>=2.8.0 * chore: update constraints --- setup.py | 2 +- testing/constraints-3.7.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index b11f528..1f93ff1 100644 --- a/setup.py +++ b/setup.py @@ -25,7 +25,7 @@ version = "1.4.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 2.8.0, <3.0.0dev", + "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", "proto-plus >= 1.15.0, <2.0.0dev", "protobuf >= 3.19.0, <4.0.0dev", ] diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index 
6522c7e..f61f150 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -4,6 +4,6 @@ # Pin the version to the lower bound. # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -google-api-core==2.8.0 +google-api-core==1.32.0 proto-plus==1.15.0 protobuf==3.19.0 From ea1fb1d595fc8a7deb75efdd9ebf3a956347909b Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 13 Jul 2022 13:07:03 -0400 Subject: [PATCH 115/159] chore(main): release 1.4.1 (#200) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 7 +++++++ setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index baa775e..498017b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.4.1](https://github.com/googleapis/python-memcache/compare/v1.4.0...v1.4.1) (2022-07-13) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.32.0,>=2.8.0 ([#199](https://github.com/googleapis/python-memcache/issues/199)) ([aa7978e](https://github.com/googleapis/python-memcache/commit/aa7978edd9b6fbe831775622ed3066e39112c2b1)) + ## [1.4.0](https://github.com/googleapis/python-memcache/compare/v1.3.2...v1.4.0) (2022-07-06) diff --git a/setup.py b/setup.py index 1f93ff1..aa1dfcb 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-memcache" description = "Memorystore for Memcached API client library" -version = "1.4.0" +version = "1.4.1" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", From 9a03f0b61288eb5fbf2f7f7d295973fc70cff951 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 13 Jul 2022 22:41:56 -0400 Subject: 
[PATCH 116/159] chore(python): allow client documentation to be customized in README (#202) Source-Link: https://github.com/googleapis/synthtool/commit/95d9289ac3dc1ca2edae06619c82fe7a24d555f1 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:c8878270182edaab99f2927969d4f700c3af265accd472c3425deedff2b7fd93 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- README.rst | 8 ++++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 1ce6085..58fcbee 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:e7bb19d47c13839fe8c147e50e02e8b6cf5da8edd1af8b82208cd6f66cc2829c -# created: 2022-07-05T18:31:20.838186805Z + digest: sha256:c8878270182edaab99f2927969d4f700c3af265accd472c3425deedff2b7fd93 +# created: 2022-07-14T01:58:16.015625351Z diff --git a/README.rst b/README.rst index 396d543..0be0c55 100644 --- a/README.rst +++ b/README.rst @@ -55,18 +55,22 @@ Code samples and snippets live in the `samples/` folder. Supported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^ -Our client libraries are compatible with all current [active](https://devguide.python.org/devcycle/#in-development-main-branch) and [maintenance](https://devguide.python.org/devcycle/#maintenance-branches) versions of +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of Python. Python >= 3.7 +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. 
_maintenance: https://devguide.python.org/devcycle/#maintenance-branches + Unsupported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^^ Python <= 3.6 -If you are using an [end-of-life](https://devguide.python.org/devcycle/#end-of-life-branches) +If you are using an `end-of-life`_ version of Python, we recommend that you update as soon as possible to an actively supported version. +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches Mac/Linux ^^^^^^^^^ From 4199f0412e25742641a4388596dea406f1fc6828 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 26 Jul 2022 00:50:11 +0000 Subject: [PATCH 117/159] chore: resolve issue with prerelease presubmit [autoapprove] (#203) Source-Link: https://github.com/googleapis/synthtool/commit/1b9ad7694e44ddb4d9844df55ff7af77b51a4435 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:9db98b055a7f8bd82351238ccaacfd3cda58cdf73012ab58b8da146368330021 --- .github/.OwlBot.lock.yaml | 4 ++-- noxfile.py | 33 ++++++++++++++++++--------------- 2 files changed, 20 insertions(+), 17 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 58fcbee..0eb02fd 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:c8878270182edaab99f2927969d4f700c3af265accd472c3425deedff2b7fd93 -# created: 2022-07-14T01:58:16.015625351Z + digest: sha256:9db98b055a7f8bd82351238ccaacfd3cda58cdf73012ab58b8da146368330021 +# created: 2022-07-25T16:02:49.174178716Z diff --git a/noxfile.py b/noxfile.py index 94b2f9c..cc39f3b 100644 --- a/noxfile.py +++ b/noxfile.py @@ -334,7 +334,8 @@ def prerelease_deps(session): # Install all dependencies session.install("-e", ".[all, tests, tracing]") - session.install(*UNIT_TEST_STANDARD_DEPENDENCIES) + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) system_deps_all = ( SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES @@ -363,12 +364,6 @@ def prerelease_deps(session): session.install(*constraints_deps) - if os.path.exists("samples/snippets/requirements.txt"): - session.install("-r", "samples/snippets/requirements.txt") - - if os.path.exists("samples/snippets/requirements-test.txt"): - session.install("-r", "samples/snippets/requirements-test.txt") - prerel_deps = [ "protobuf", # dependency of grpc @@ -405,11 +400,19 @@ def prerelease_deps(session): system_test_folder_path = os.path.join("tests", "system") # Only run system tests if found. - if os.path.exists(system_test_path) or os.path.exists(system_test_folder_path): - session.run("py.test", "tests/system") - - snippets_test_path = os.path.join("samples", "snippets") - - # Only run samples tests if found. 
- if os.path.exists(snippets_test_path): - session.run("py.test", "samples/snippets") + if os.path.exists(system_test_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if os.path.exists(system_test_folder_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) From 318f384da402cd5b95efb025b9a05fbb6eeee743 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 25 Jul 2022 21:34:55 -0400 Subject: [PATCH 118/159] chore(bazel): update protobuf to v3.21.3 (#204) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(bazel): update protobuf to v3.21.3 chore(bazel): update gax-java to 2.18.4 PiperOrigin-RevId: 463115700 Source-Link: https://github.com/googleapis/googleapis/commit/52130a9c3c289e6bc4ab1784bdde6081abdf3dd9 Source-Link: https://github.com/googleapis/googleapis-gen/commit/6a4d9d9bb3afb20b0f5fa4f5d9f6740b1d0eb19a Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNmE0ZDlkOWJiM2FmYjIwYjBmNWZhNGY1ZDlmNjc0MGIxZDBlYjE5YSJ9 * 🩉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../cloud/memcache_v1/services/cloud_memcache/async_client.py | 3 --- google/cloud/memcache_v1/services/cloud_memcache/client.py | 3 --- .../memcache_v1beta2/services/cloud_memcache/async_client.py | 3 --- .../cloud/memcache_v1beta2/services/cloud_memcache/client.py | 3 --- 4 files changed, 12 deletions(-) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py index 3fc0230..9832f7f 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py +++ 
b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py @@ -908,9 +908,6 @@ async def sample_delete_instance(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py index af3e085..e467d54 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -1131,9 +1131,6 @@ def sample_delete_instance(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py index da31dc5..1c4ac2f 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py @@ -911,9 +911,6 @@ async def sample_delete_instance(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index 6726ae2..879fd77 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -1134,9 +1134,6 @@ def sample_delete_instance(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. 
# Quick check: If we got a request object, we should *not* have From 7ea352e3c74f15181df1d3dd36c062bd5ec46b42 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 9 Aug 2022 16:14:22 +0000 Subject: [PATCH 119/159] chore(deps): update actions/setup-python action to v4 [autoapprove] (#206) Source-Link: https://github.com/googleapis/synthtool/commit/8e55b327bae44b6640c7ab4be91df85fc4d6fe8a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:c6c965a4bf40c19011b11f87dbc801a66d3a23fbc6704102be064ef31c51f1c3 --- .github/.OwlBot.lock.yaml | 4 ++-- .github/workflows/docs.yml | 4 ++-- .github/workflows/lint.yml | 2 +- .github/workflows/unittest.yml | 4 ++-- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 0eb02fd..c701359 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:9db98b055a7f8bd82351238ccaacfd3cda58cdf73012ab58b8da146368330021 -# created: 2022-07-25T16:02:49.174178716Z + digest: sha256:c6c965a4bf40c19011b11f87dbc801a66d3a23fbc6704102be064ef31c51f1c3 +# created: 2022-08-09T15:58:56.463048506Z diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index b46d730..7092a13 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -10,7 +10,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "3.10" - name: Install nox @@ -26,7 +26,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "3.10" - name: Install nox diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index f512a49..d2aee5b 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -10,7 +10,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "3.10" - name: Install nox diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index 5531b01..87ade4d 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -13,7 +13,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python }} - name: Install nox @@ -39,7 +39,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "3.10" - name: Install coverage From 8a21a069eae8ad4e3b0f33012d1f50cf547baafd Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 11 Aug 2022 14:56:17 -0400 
Subject: [PATCH 120/159] fix(deps): allow protobuf < 5.0.0 (#207) fix(deps): require proto-plus >= 1.22.0 --- setup.py | 4 ++-- testing/constraints-3.7.txt | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/setup.py b/setup.py index aa1dfcb..3e2f55f 100644 --- a/setup.py +++ b/setup.py @@ -26,8 +26,8 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", - "proto-plus >= 1.15.0, <2.0.0dev", - "protobuf >= 3.19.0, <4.0.0dev", + "proto-plus >= 1.22.0, <2.0.0dev", + "protobuf >= 3.19.0, <5.0.0dev", ] url = "https://github.com/googleapis/python-memcache" diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index f61f150..810c7cb 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -5,5 +5,5 @@ # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.32.0 -proto-plus==1.15.0 +proto-plus==1.22.0 protobuf==3.19.0 From 6a6acd59c7194b5fc02c5b3653ca8912e2906927 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 16 Aug 2022 10:45:55 -0400 Subject: [PATCH 121/159] chore(main): release 1.4.2 (#208) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 8 ++++++++ setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 498017b..67537fc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.4.2](https://github.com/googleapis/python-memcache/compare/v1.4.1...v1.4.2) (2022-08-11) + + +### Bug Fixes + +* **deps:** allow protobuf < 5.0.0 ([#207](https://github.com/googleapis/python-memcache/issues/207)) 
([8a21a06](https://github.com/googleapis/python-memcache/commit/8a21a069eae8ad4e3b0f33012d1f50cf547baafd)) +* **deps:** require proto-plus >= 1.22.0 ([8a21a06](https://github.com/googleapis/python-memcache/commit/8a21a069eae8ad4e3b0f33012d1f50cf547baafd)) + ## [1.4.1](https://github.com/googleapis/python-memcache/compare/v1.4.0...v1.4.1) (2022-07-13) diff --git a/setup.py b/setup.py index 3e2f55f..02cf890 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-memcache" description = "Memorystore for Memcached API client library" -version = "1.4.1" +version = "1.4.2" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", From 9db27eeba9ec1cc9494374ba8a114e5e50b4a65b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 24 Aug 2022 15:42:27 +0000 Subject: [PATCH 122/159] chore: remove 'pip install' statements from python_library templates [autoapprove] (#210) Source-Link: https://github.com/googleapis/synthtool/commit/1f37ce74cbc4897f35c9ba5c40393b102da913b1 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:8e84e0e0d71a0d681668461bba02c9e1394c785f31a10ae3470660235b673086 --- .github/.OwlBot.lock.yaml | 4 +- .kokoro/publish-docs.sh | 4 +- .kokoro/release.sh | 5 +- .kokoro/requirements.in | 8 + .kokoro/requirements.txt | 464 ++++++++++++++++++++++++++++++++++++++ 5 files changed, 476 insertions(+), 9 deletions(-) create mode 100644 .kokoro/requirements.in create mode 100644 .kokoro/requirements.txt diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index c701359..1c14d7f 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:c6c965a4bf40c19011b11f87dbc801a66d3a23fbc6704102be064ef31c51f1c3 -# created: 2022-08-09T15:58:56.463048506Z + digest: sha256:8e84e0e0d71a0d681668461bba02c9e1394c785f31a10ae3470660235b673086 +# created: 2022-08-24T15:24:05.205983455Z diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh index 8acb14e..1c4d623 100755 --- a/.kokoro/publish-docs.sh +++ b/.kokoro/publish-docs.sh @@ -21,14 +21,12 @@ export PYTHONUNBUFFERED=1 export PATH="${HOME}/.local/bin:${PATH}" # Install nox -python3 -m pip install --user --upgrade --quiet nox +python3 -m pip install --require-hashes -r .kokoro/requirements.txt python3 -m nox --version # build docs nox -s docs -python3 -m pip install --user gcp-docuploader - # create metadata python3 -m docuploader create-metadata \ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ diff --git a/.kokoro/release.sh b/.kokoro/release.sh index 5131ab8..3cd3936 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -16,12 +16,9 @@ set -eo pipefail # Start the releasetool reporter -python3 -m pip install gcp-releasetool +python3 -m pip install --require-hashes -r .kokoro/requirements.txt python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script -# Ensure that we have the latest versions of Twine, Wheel, and Setuptools. -python3 -m pip install --upgrade twine wheel setuptools - # Disable buffering, so that the logs stream through. 
export PYTHONUNBUFFERED=1 diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in new file mode 100644 index 0000000..7718391 --- /dev/null +++ b/.kokoro/requirements.in @@ -0,0 +1,8 @@ +gcp-docuploader +gcp-releasetool +importlib-metadata +typing-extensions +twine +wheel +setuptools +nox \ No newline at end of file diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt new file mode 100644 index 0000000..c4b824f --- /dev/null +++ b/.kokoro/requirements.txt @@ -0,0 +1,464 @@ +# +# This file is autogenerated by pip-compile with python 3.10 +# To update, run: +# +# pip-compile --allow-unsafe --generate-hashes requirements.in +# +argcomplete==2.0.0 \ + --hash=sha256:6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20 \ + --hash=sha256:cffa11ea77999bb0dd27bb25ff6dc142a6796142f68d45b1a26b11f58724561e + # via nox +attrs==22.1.0 \ + --hash=sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6 \ + --hash=sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c + # via gcp-releasetool +bleach==5.0.1 \ + --hash=sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a \ + --hash=sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c + # via readme-renderer +cachetools==5.2.0 \ + --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ + --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db + # via google-auth +certifi==2022.6.15 \ + --hash=sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d \ + --hash=sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412 + # via requests +cffi==1.15.1 \ + --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ + --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ + --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ + 
--hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ + --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ + --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ + --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ + --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ + --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ + --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ + --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ + --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ + --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ + --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ + --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ + --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ + --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ + --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ + --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ + --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ + --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ + --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ + --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ + --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ + --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ + --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ + --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ + 
--hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ + --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ + --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ + --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ + --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ + --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ + --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ + --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ + --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ + --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ + --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ + --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ + --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ + --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ + --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ + --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ + --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ + --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ + --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ + --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ + --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ + --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ + --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ + --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ + 
--hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ + --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ + --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ + --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ + --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ + --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ + --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ + --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ + --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ + --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ + --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ + --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ + --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 + # via cryptography +charset-normalizer==2.1.1 \ + --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ + --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f + # via requests +click==8.0.4 \ + --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ + --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb + # via + # gcp-docuploader + # gcp-releasetool +colorlog==6.6.0 \ + --hash=sha256:344f73204009e4c83c5b6beb00b3c45dc70fcdae3c80db919e0a4171d006fde8 \ + --hash=sha256:351c51e866c86c3217f08e4b067a7974a678be78f07f85fc2d55b8babde6d94e + # via + # gcp-docuploader + # nox +commonmark==0.9.1 \ + --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ + --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 + # via rich +cryptography==37.0.4 \ + 
--hash=sha256:190f82f3e87033821828f60787cfa42bff98404483577b591429ed99bed39d59 \ + --hash=sha256:2be53f9f5505673eeda5f2736bea736c40f051a739bfae2f92d18aed1eb54596 \ + --hash=sha256:30788e070800fec9bbcf9faa71ea6d8068f5136f60029759fd8c3efec3c9dcb3 \ + --hash=sha256:3d41b965b3380f10e4611dbae366f6dc3cefc7c9ac4e8842a806b9672ae9add5 \ + --hash=sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab \ + --hash=sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884 \ + --hash=sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82 \ + --hash=sha256:6bc95ed67b6741b2607298f9ea4932ff157e570ef456ef7ff0ef4884a134cc4b \ + --hash=sha256:7099a8d55cd49b737ffc99c17de504f2257e3787e02abe6d1a6d136574873441 \ + --hash=sha256:75976c217f10d48a8b5a8de3d70c454c249e4b91851f6838a4e48b8f41eb71aa \ + --hash=sha256:7bc997818309f56c0038a33b8da5c0bfbb3f1f067f315f9abd6fc07ad359398d \ + --hash=sha256:80f49023dd13ba35f7c34072fa17f604d2f19bf0989f292cedf7ab5770b87a0b \ + --hash=sha256:91ce48d35f4e3d3f1d83e29ef4a9267246e6a3be51864a5b7d2247d5086fa99a \ + --hash=sha256:a958c52505c8adf0d3822703078580d2c0456dd1d27fabfb6f76fe63d2971cd6 \ + --hash=sha256:b62439d7cd1222f3da897e9a9fe53bbf5c104fff4d60893ad1355d4c14a24157 \ + --hash=sha256:b7f8dd0d4c1f21759695c05a5ec8536c12f31611541f8904083f3dc582604280 \ + --hash=sha256:d204833f3c8a33bbe11eda63a54b1aad7aa7456ed769a982f21ec599ba5fa282 \ + --hash=sha256:e007f052ed10cc316df59bc90fbb7ff7950d7e2919c9757fd42a2b8ecf8a5f67 \ + --hash=sha256:f2dcb0b3b63afb6df7fd94ec6fbddac81b5492513f7b0436210d390c14d46ee8 \ + --hash=sha256:f721d1885ecae9078c3f6bbe8a88bc0786b6e749bf32ccec1ef2b18929a05046 \ + --hash=sha256:f7a6de3e98771e183645181b3627e2563dcde3ce94a9e42a3f427d2255190327 \ + --hash=sha256:f8c0a6e9e1dd3eb0414ba320f85da6b0dcbd543126e30fcc546e7372a7fbf3b9 + # via + # gcp-releasetool + # secretstorage +distlib==0.3.5 \ + --hash=sha256:a7f75737c70be3b25e2bee06288cec4e4c221de18455b2dd037fe2a795cab2fe \ + 
--hash=sha256:b710088c59f06338ca514800ad795a132da19fda270e3ce4affc74abf955a26c + # via virtualenv +docutils==0.19 \ + --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \ + --hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc + # via readme-renderer +filelock==3.8.0 \ + --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \ + --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4 + # via virtualenv +gcp-docuploader==0.6.3 \ + --hash=sha256:ba8c9d76b3bbac54b0311c503a373b00edc2dc02d6d54ea9507045adb8e870f7 \ + --hash=sha256:c0f5aaa82ce1854a386197e4e359b120ad6d4e57ae2c812fce42219a3288026b + # via -r requirements.in +gcp-releasetool==1.8.6 \ + --hash=sha256:42e51ab8e2e789bc8e22a03c09352962cd3452951c801a2230d564816630304a \ + --hash=sha256:a3518b79d1b243c494eac392a01c7fd65187fd6d52602dcab9b529bc934d4da1 + # via -r requirements.in +google-api-core==2.8.2 \ + --hash=sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc \ + --hash=sha256:93c6a91ccac79079ac6bbf8b74ee75db970cc899278b97d53bc012f35908cf50 + # via + # google-cloud-core + # google-cloud-storage +google-auth==2.11.0 \ + --hash=sha256:be62acaae38d0049c21ca90f27a23847245c9f161ff54ede13af2cb6afecbac9 \ + --hash=sha256:ed65ecf9f681832298e29328e1ef0a3676e3732b2e56f41532d45f70a22de0fb + # via + # gcp-releasetool + # google-api-core + # google-cloud-core + # google-cloud-storage +google-cloud-core==2.3.2 \ + --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \ + --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a + # via google-cloud-storage +google-cloud-storage==2.5.0 \ + --hash=sha256:19a26c66c317ce542cea0830b7e787e8dac2588b6bfa4d3fd3b871ba16305ab0 \ + --hash=sha256:382f34b91de2212e3c2e7b40ec079d27ee2e3dbbae99b75b1bcd8c63063ce235 + # via gcp-docuploader +google-crc32c==1.3.0 \ + 
--hash=sha256:04e7c220798a72fd0f08242bc8d7a05986b2a08a0573396187fd32c1dcdd58b3 \ + --hash=sha256:05340b60bf05b574159e9bd940152a47d38af3fb43803ffe71f11d704b7696a6 \ + --hash=sha256:12674a4c3b56b706153a358eaa1018c4137a5a04635b92b4652440d3d7386206 \ + --hash=sha256:127f9cc3ac41b6a859bd9dc4321097b1a4f6aa7fdf71b4f9227b9e3ebffb4422 \ + --hash=sha256:13af315c3a0eec8bb8b8d80b8b128cb3fcd17d7e4edafc39647846345a3f003a \ + --hash=sha256:1926fd8de0acb9d15ee757175ce7242e235482a783cd4ec711cc999fc103c24e \ + --hash=sha256:226f2f9b8e128a6ca6a9af9b9e8384f7b53a801907425c9a292553a3a7218ce0 \ + --hash=sha256:276de6273eb074a35bc598f8efbc00c7869c5cf2e29c90748fccc8c898c244df \ + --hash=sha256:318f73f5484b5671f0c7f5f63741ab020a599504ed81d209b5c7129ee4667407 \ + --hash=sha256:3bbce1be3687bbfebe29abdb7631b83e6b25da3f4e1856a1611eb21854b689ea \ + --hash=sha256:42ae4781333e331a1743445931b08ebdad73e188fd554259e772556fc4937c48 \ + --hash=sha256:58be56ae0529c664cc04a9c76e68bb92b091e0194d6e3c50bea7e0f266f73713 \ + --hash=sha256:5da2c81575cc3ccf05d9830f9e8d3c70954819ca9a63828210498c0774fda1a3 \ + --hash=sha256:6311853aa2bba4064d0c28ca54e7b50c4d48e3de04f6770f6c60ebda1e975267 \ + --hash=sha256:650e2917660e696041ab3dcd7abac160b4121cd9a484c08406f24c5964099829 \ + --hash=sha256:6a4db36f9721fdf391646685ecffa404eb986cbe007a3289499020daf72e88a2 \ + --hash=sha256:779cbf1ce375b96111db98fca913c1f5ec11b1d870e529b1dc7354b2681a8c3a \ + --hash=sha256:7f6fe42536d9dcd3e2ffb9d3053f5d05221ae3bbcefbe472bdf2c71c793e3183 \ + --hash=sha256:891f712ce54e0d631370e1f4997b3f182f3368179198efc30d477c75d1f44942 \ + --hash=sha256:95c68a4b9b7828ba0428f8f7e3109c5d476ca44996ed9a5f8aac6269296e2d59 \ + --hash=sha256:96a8918a78d5d64e07c8ea4ed2bc44354e3f93f46a4866a40e8db934e4c0d74b \ + --hash=sha256:9c3cf890c3c0ecfe1510a452a165431b5831e24160c5fcf2071f0f85ca5a47cd \ + --hash=sha256:9f58099ad7affc0754ae42e6d87443299f15d739b0ce03c76f515153a5cda06c \ + --hash=sha256:a0b9e622c3b2b8d0ce32f77eba617ab0d6768b82836391e4f8f9e2074582bf02 \ + 
--hash=sha256:a7f9cbea4245ee36190f85fe1814e2d7b1e5f2186381b082f5d59f99b7f11328 \ + --hash=sha256:bab4aebd525218bab4ee615786c4581952eadc16b1ff031813a2fd51f0cc7b08 \ + --hash=sha256:c124b8c8779bf2d35d9b721e52d4adb41c9bfbde45e6a3f25f0820caa9aba73f \ + --hash=sha256:c9da0a39b53d2fab3e5467329ed50e951eb91386e9d0d5b12daf593973c3b168 \ + --hash=sha256:ca60076c388728d3b6ac3846842474f4250c91efbfe5afa872d3ffd69dd4b318 \ + --hash=sha256:cb6994fff247987c66a8a4e550ef374671c2b82e3c0d2115e689d21e511a652d \ + --hash=sha256:d1c1d6236feab51200272d79b3d3e0f12cf2cbb12b208c835b175a21efdb0a73 \ + --hash=sha256:dd7760a88a8d3d705ff562aa93f8445ead54f58fd482e4f9e2bafb7e177375d4 \ + --hash=sha256:dda4d8a3bb0b50f540f6ff4b6033f3a74e8bf0bd5320b70fab2c03e512a62812 \ + --hash=sha256:e0f1ff55dde0ebcfbef027edc21f71c205845585fffe30d4ec4979416613e9b3 \ + --hash=sha256:e7a539b9be7b9c00f11ef16b55486141bc2cdb0c54762f84e3c6fc091917436d \ + --hash=sha256:eb0b14523758e37802f27b7f8cd973f5f3d33be7613952c0df904b68c4842f0e \ + --hash=sha256:ed447680ff21c14aaceb6a9f99a5f639f583ccfe4ce1a5e1d48eb41c3d6b3217 \ + --hash=sha256:f52a4ad2568314ee713715b1e2d79ab55fab11e8b304fd1462ff5cccf4264b3e \ + --hash=sha256:fbd60c6aaa07c31d7754edbc2334aef50601b7f1ada67a96eb1eb57c7c72378f \ + --hash=sha256:fc28e0db232c62ca0c3600884933178f0825c99be4474cdd645e378a10588125 \ + --hash=sha256:fe31de3002e7b08eb20823b3735b97c86c5926dd0581c7710a680b418a8709d4 \ + --hash=sha256:fec221a051150eeddfdfcff162e6db92c65ecf46cb0f7bb1bf812a1520ec026b \ + --hash=sha256:ff71073ebf0e42258a42a0b34f2c09ec384977e7f6808999102eedd5b49920e3 + # via google-resumable-media +google-resumable-media==2.3.3 \ + --hash=sha256:27c52620bd364d1c8116eaac4ea2afcbfb81ae9139fb3199652fcac1724bfb6c \ + --hash=sha256:5b52774ea7a829a8cdaa8bd2d4c3d4bc660c91b30857ab2668d0eb830f4ea8c5 + # via google-cloud-storage +googleapis-common-protos==1.56.4 \ + --hash=sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394 \ + 
--hash=sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417 + # via google-api-core +idna==3.3 \ + --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \ + --hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d + # via requests +importlib-metadata==4.12.0 \ + --hash=sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670 \ + --hash=sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23 + # via + # -r requirements.in + # twine +jeepney==0.8.0 \ + --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ + --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 + # via + # keyring + # secretstorage +jinja2==3.1.2 \ + --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ + --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 + # via gcp-releasetool +keyring==23.8.2 \ + --hash=sha256:0d9973f8891850f1ade5f26aafd06bb16865fbbae3fc56b0defb6a14a2624003 \ + --hash=sha256:10d2a8639663fe2090705a00b8c47c687cacdf97598ea9c11456679fa974473a + # via + # gcp-releasetool + # twine +markupsafe==2.1.1 \ + --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \ + --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \ + --hash=sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5 \ + --hash=sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7 \ + --hash=sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a \ + --hash=sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603 \ + --hash=sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1 \ + --hash=sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135 \ + --hash=sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247 \ + 
--hash=sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6 \ + --hash=sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601 \ + --hash=sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77 \ + --hash=sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02 \ + --hash=sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e \ + --hash=sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63 \ + --hash=sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f \ + --hash=sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980 \ + --hash=sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b \ + --hash=sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812 \ + --hash=sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff \ + --hash=sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96 \ + --hash=sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1 \ + --hash=sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925 \ + --hash=sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a \ + --hash=sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6 \ + --hash=sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e \ + --hash=sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f \ + --hash=sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4 \ + --hash=sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f \ + --hash=sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3 \ + --hash=sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c \ + --hash=sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a \ + --hash=sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417 \ + 
--hash=sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a \ + --hash=sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a \ + --hash=sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37 \ + --hash=sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452 \ + --hash=sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933 \ + --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ + --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 + # via jinja2 +nox==2022.8.7 \ + --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ + --hash=sha256:96cca88779e08282a699d672258ec01eb7c792d35bbbf538c723172bce23212c + # via -r requirements.in +packaging==21.3 \ + --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ + --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 + # via + # gcp-releasetool + # nox +pkginfo==1.8.3 \ + --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \ + --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c + # via twine +platformdirs==2.5.2 \ + --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ + --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 + # via virtualenv +protobuf==3.20.1 \ + --hash=sha256:06059eb6953ff01e56a25cd02cca1a9649a75a7e65397b5b9b4e929ed71d10cf \ + --hash=sha256:097c5d8a9808302fb0da7e20edf0b8d4703274d140fd25c5edabddcde43e081f \ + --hash=sha256:284f86a6207c897542d7e956eb243a36bb8f9564c1742b253462386e96c6b78f \ + --hash=sha256:32ca378605b41fd180dfe4e14d3226386d8d1b002ab31c969c366549e66a2bb7 \ + --hash=sha256:3cc797c9d15d7689ed507b165cd05913acb992d78b379f6014e013f9ecb20996 \ + --hash=sha256:62f1b5c4cd6c5402b4e2d63804ba49a327e0c386c99b1675c8a0fefda23b2067 \ + 
--hash=sha256:69ccfdf3657ba59569c64295b7d51325f91af586f8d5793b734260dfe2e94e2c \ + --hash=sha256:6f50601512a3d23625d8a85b1638d914a0970f17920ff39cec63aaef80a93fb7 \ + --hash=sha256:7403941f6d0992d40161aa8bb23e12575637008a5a02283a930addc0508982f9 \ + --hash=sha256:755f3aee41354ae395e104d62119cb223339a8f3276a0cd009ffabfcdd46bb0c \ + --hash=sha256:77053d28427a29987ca9caf7b72ccafee011257561259faba8dd308fda9a8739 \ + --hash=sha256:7e371f10abe57cee5021797126c93479f59fccc9693dafd6bd5633ab67808a91 \ + --hash=sha256:9016d01c91e8e625141d24ec1b20fed584703e527d28512aa8c8707f105a683c \ + --hash=sha256:9be73ad47579abc26c12024239d3540e6b765182a91dbc88e23658ab71767153 \ + --hash=sha256:adc31566d027f45efe3f44eeb5b1f329da43891634d61c75a5944e9be6dd42c9 \ + --hash=sha256:adfc6cf69c7f8c50fd24c793964eef18f0ac321315439d94945820612849c388 \ + --hash=sha256:af0ebadc74e281a517141daad9d0f2c5d93ab78e9d455113719a45a49da9db4e \ + --hash=sha256:cb29edb9eab15742d791e1025dd7b6a8f6fcb53802ad2f6e3adcb102051063ab \ + --hash=sha256:cd68be2559e2a3b84f517fb029ee611546f7812b1fdd0aa2ecc9bc6ec0e4fdde \ + --hash=sha256:cdee09140e1cd184ba9324ec1df410e7147242b94b5f8b0c64fc89e38a8ba531 \ + --hash=sha256:db977c4ca738dd9ce508557d4fce0f5aebd105e158c725beec86feb1f6bc20d8 \ + --hash=sha256:dd5789b2948ca702c17027c84c2accb552fc30f4622a98ab5c51fcfe8c50d3e7 \ + --hash=sha256:e250a42f15bf9d5b09fe1b293bdba2801cd520a9f5ea2d7fb7536d4441811d20 \ + --hash=sha256:ff8d8fa42675249bb456f5db06c00de6c2f4c27a065955917b28c4f15978b9c3 + # via + # gcp-docuploader + # gcp-releasetool + # google-api-core +py==1.11.0 \ + --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ + --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 + # via nox +pyasn1==0.4.8 \ + --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ + --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.2.8 \ + 
--hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \ + --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74 + # via google-auth +pycparser==2.21 \ + --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ + --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 + # via cffi +pygments==2.13.0 \ + --hash=sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1 \ + --hash=sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42 + # via + # readme-renderer + # rich +pyjwt==2.4.0 \ + --hash=sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf \ + --hash=sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba + # via gcp-releasetool +pyparsing==3.0.9 \ + --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ + --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc + # via packaging +pyperclip==1.8.2 \ + --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 + # via gcp-releasetool +python-dateutil==2.8.2 \ + --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ + --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 + # via gcp-releasetool +readme-renderer==37.0 \ + --hash=sha256:07b7ea234e03e58f77cc222e206e6abb8f4c0435becce5104794ee591f9301c5 \ + --hash=sha256:9fa416704703e509eeb900696751c908ddeb2011319d93700d8f18baff887a69 + # via twine +requests==2.28.1 \ + --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ + --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349 + # via + # gcp-releasetool + # google-api-core + # google-cloud-storage + # requests-toolbelt + # twine +requests-toolbelt==0.9.1 \ + --hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \ + 
--hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0 + # via twine +rfc3986==2.0.0 \ + --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ + --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c + # via twine +rich==12.5.1 \ + --hash=sha256:2eb4e6894cde1e017976d2975ac210ef515d7548bc595ba20e195fb9628acdeb \ + --hash=sha256:63a5c5ce3673d3d5fbbf23cd87e11ab84b6b451436f1b7f19ec54b6bc36ed7ca + # via twine +rsa==4.9 \ + --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ + --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 + # via google-auth +secretstorage==3.3.3 \ + --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ + --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 + # via keyring +six==1.16.0 \ + --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ + --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 + # via + # bleach + # gcp-docuploader + # google-auth + # python-dateutil +twine==4.0.1 \ + --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \ + --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0 + # via -r requirements.in +typing-extensions==4.3.0 \ + --hash=sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02 \ + --hash=sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6 + # via -r requirements.in +urllib3==1.26.12 \ + --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ + --hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997 + # via + # requests + # twine +virtualenv==20.16.3 \ + --hash=sha256:4193b7bc8a6cd23e4eb251ac64f29b4398ab2c233531e66e40b19a6b7b0d30c1 \ + --hash=sha256:d86ea0bb50e06252d79e6c241507cb904fcd66090c3271381372d6221a3970f9 + # via nox +webencodings==0.5.1 \ 
+ --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ + --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 + # via bleach +wheel==0.37.1 \ + --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ + --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 + # via -r requirements.in +zipp==3.8.1 \ + --hash=sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2 \ + --hash=sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +setuptools==65.2.0 \ + --hash=sha256:7f4bc85450898a09f76ebf28b72fa25bc7111f6c7d665d514a60bba9c75ef2a9 \ + --hash=sha256:a3ca5857c89f82f5c9410e8508cb32f4872a3bafd4aa7ae122a24ca33bccc750 + # via -r requirements.in From df1dec4b8b0557ad626bc14587537ecc50666be9 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 24 Aug 2022 17:30:16 +0000 Subject: [PATCH 123/159] chore(python): exclude path in renovate.json [autoapprove] (#212) Source-Link: https://github.com/googleapis/synthtool/commit/69fabaee9eca28af7ecaa02c86895e606fbbebd6 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:562802bfac02e012a6ac34eda282f81d06e77326b82a32d7bbb1369ff552b387 --- .github/.OwlBot.lock.yaml | 4 ++-- renovate.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 1c14d7f..c6acdf3 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:8e84e0e0d71a0d681668461bba02c9e1394c785f31a10ae3470660235b673086 -# created: 2022-08-24T15:24:05.205983455Z + digest: sha256:562802bfac02e012a6ac34eda282f81d06e77326b82a32d7bbb1369ff552b387 +# created: 2022-08-24T17:07:22.006876712Z diff --git a/renovate.json b/renovate.json index c21036d..566a70f 100644 --- a/renovate.json +++ b/renovate.json @@ -5,7 +5,7 @@ ":preserveSemverRanges", ":disableDependencyDashboard" ], - "ignorePaths": [".pre-commit-config.yaml"], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt"], "pip_requirements": { "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] } From a4e8e6f75c9e74b68dfae2b3179f60373be016ec Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 29 Aug 2022 18:04:35 +0000 Subject: [PATCH 124/159] chore(python): exclude grpcio==1.49.0rc1 in tests [autoapprove] (#213) Source-Link: https://github.com/googleapis/synthtool/commit/c4dd5953003d13b239f872d329c3146586bb417e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:ce3c1686bc81145c81dd269bd12c4025c6b275b22d14641358827334fddb1d72 --- .github/.OwlBot.lock.yaml | 4 ++-- .kokoro/requirements.txt | 6 +++--- noxfile.py | 7 +++++-- 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index c6acdf3..23e106b 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:562802bfac02e012a6ac34eda282f81d06e77326b82a32d7bbb1369ff552b387 -# created: 2022-08-24T17:07:22.006876712Z + digest: sha256:ce3c1686bc81145c81dd269bd12c4025c6b275b22d14641358827334fddb1d72 +# created: 2022-08-29T17:28:30.441852797Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index c4b824f..4b29ef2 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -136,9 +136,9 @@ cryptography==37.0.4 \ # via # gcp-releasetool # secretstorage -distlib==0.3.5 \ - --hash=sha256:a7f75737c70be3b25e2bee06288cec4e4c221de18455b2dd037fe2a795cab2fe \ - --hash=sha256:b710088c59f06338ca514800ad795a132da19fda270e3ce4affc74abf955a26c +distlib==0.3.6 \ + --hash=sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46 \ + --hash=sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e # via virtualenv docutils==0.19 \ --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \ diff --git a/noxfile.py b/noxfile.py index cc39f3b..ffe9f35 100644 --- a/noxfile.py +++ b/noxfile.py @@ -189,7 +189,9 @@ def unit(session): def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. - session.install("--pre", "grpcio") + # Exclude version 1.49.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/pull/30642 + session.install("--pre", "grpcio!=1.49.0rc1") session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) @@ -369,7 +371,8 @@ def prerelease_deps(session): # dependency of grpc "six", "googleapis-common-protos", - "grpcio", + # Exclude version 1.49.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/pull/30642 + "grpcio!=1.49.0rc1", "grpcio-status", "google-api-core", "proto-plus", From b22f516cbaac7a33514bf62da5c7c8daf1005442 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 1 Sep 2022 18:46:32 +0000 Subject: [PATCH 125/159] ci(python): fix path to requirements.txt in release script (#214) Source-Link: https://github.com/googleapis/synthtool/commit/fdba3ed145bdb2f4f3eff434d4284b1d03b80d34 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:1f0dbd02745fb7cf255563dab5968345989308544e52b7f460deadd5e78e63b0 --- .github/.OwlBot.lock.yaml | 3 +-- .kokoro/release.sh | 2 +- .kokoro/requirements.txt | 24 ++++++++++++------------ 3 files changed, 14 insertions(+), 15 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 23e106b..0d9eb2a 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ce3c1686bc81145c81dd269bd12c4025c6b275b22d14641358827334fddb1d72 -# created: 2022-08-29T17:28:30.441852797Z + digest: sha256:1f0dbd02745fb7cf255563dab5968345989308544e52b7f460deadd5e78e63b0 diff --git a/.kokoro/release.sh b/.kokoro/release.sh index 3cd3936..73c1295 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -16,7 +16,7 @@ set -eo pipefail # Start the releasetool reporter -python3 -m pip install --require-hashes -r .kokoro/requirements.txt +python3 -m pip install --require-hashes -r github/python-memcache/.kokoro/requirements.txt python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script # Disable buffering, so that the logs stream through. 
diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 4b29ef2..92b2f72 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -100,9 +100,9 @@ click==8.0.4 \ # via # gcp-docuploader # gcp-releasetool -colorlog==6.6.0 \ - --hash=sha256:344f73204009e4c83c5b6beb00b3c45dc70fcdae3c80db919e0a4171d006fde8 \ - --hash=sha256:351c51e866c86c3217f08e4b067a7974a678be78f07f85fc2d55b8babde6d94e +colorlog==6.7.0 \ + --hash=sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662 \ + --hash=sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5 # via # gcp-docuploader # nox @@ -152,9 +152,9 @@ gcp-docuploader==0.6.3 \ --hash=sha256:ba8c9d76b3bbac54b0311c503a373b00edc2dc02d6d54ea9507045adb8e870f7 \ --hash=sha256:c0f5aaa82ce1854a386197e4e359b120ad6d4e57ae2c812fce42219a3288026b # via -r requirements.in -gcp-releasetool==1.8.6 \ - --hash=sha256:42e51ab8e2e789bc8e22a03c09352962cd3452951c801a2230d564816630304a \ - --hash=sha256:a3518b79d1b243c494eac392a01c7fd65187fd6d52602dcab9b529bc934d4da1 +gcp-releasetool==1.8.7 \ + --hash=sha256:3d2a67c9db39322194afb3b427e9cb0476ce8f2a04033695f0aeb63979fc2b37 \ + --hash=sha256:5e4d28f66e90780d77f3ecf1e9155852b0c3b13cbccb08ab07e66b2357c8da8d # via -r requirements.in google-api-core==2.8.2 \ --hash=sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc \ @@ -251,9 +251,9 @@ jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via gcp-releasetool -keyring==23.8.2 \ - --hash=sha256:0d9973f8891850f1ade5f26aafd06bb16865fbbae3fc56b0defb6a14a2624003 \ - --hash=sha256:10d2a8639663fe2090705a00b8c47c687cacdf97598ea9c11456679fa974473a +keyring==23.9.0 \ + --hash=sha256:4c32a31174faaee48f43a7e2c7e9c3216ec5e95acf22a2bebfb4a1d05056ee44 \ + --hash=sha256:98f060ec95ada2ab910c195a2d4317be6ef87936a766b239c46aa3c7aac4f0db # via # gcp-releasetool # twine @@ -440,9 
+440,9 @@ urllib3==1.26.12 \ # via # requests # twine -virtualenv==20.16.3 \ - --hash=sha256:4193b7bc8a6cd23e4eb251ac64f29b4398ab2c233531e66e40b19a6b7b0d30c1 \ - --hash=sha256:d86ea0bb50e06252d79e6c241507cb904fcd66090c3271381372d6221a3970f9 +virtualenv==20.16.4 \ + --hash=sha256:014f766e4134d0008dcaa1f95bafa0fb0f575795d07cae50b1bee514185d6782 \ + --hash=sha256:035ed57acce4ac35c82c9d8802202b0e71adac011a511ff650cbcf9635006a22 # via nox webencodings==0.5.1 \ --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ From 273128e955e5d9db67538937aa1342dc8518e031 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 2 Sep 2022 00:38:39 +0000 Subject: [PATCH 126/159] chore(python): update .kokoro/requirements.txt (#215) Source-Link: https://github.com/googleapis/synthtool/commit/703554a14c7479542335b62fa69279f93a9e38ec Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:94961fdc5c9ca6d13530a6a414a49d2f607203168215d074cdb0a1df9ec31c0b --- .github/.OwlBot.lock.yaml | 2 +- .kokoro/requirements.txt | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 0d9eb2a..2fa0f7c 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:1f0dbd02745fb7cf255563dab5968345989308544e52b7f460deadd5e78e63b0 + digest: sha256:94961fdc5c9ca6d13530a6a414a49d2f607203168215d074cdb0a1df9ec31c0b diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 92b2f72..385f2d4 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -241,6 +241,10 @@ importlib-metadata==4.12.0 \ # via # -r requirements.in # twine +jaraco-classes==3.2.2 \ + --hash=sha256:6745f113b0b588239ceb49532aa09c3ebb947433ce311ef2f8e3ad64ebb74594 \ + --hash=sha256:e6ef6fd3fcf4579a7a019d87d1e56a883f4e4c35cfe925f86731abc58804e647 + # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 @@ -299,6 +303,10 @@ markupsafe==2.1.1 \ --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 # via jinja2 +more-itertools==8.14.0 \ + --hash=sha256:1bc4f91ee5b1b31ac7ceacc17c09befe6a40a503907baf9c839c229b5095cfd2 \ + --hash=sha256:c09443cd3d5438b8dafccd867a6bc1cb0894389e90cb53d227456b0b0bccb750 + # via jaraco-classes nox==2022.8.7 \ --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ --hash=sha256:96cca88779e08282a699d672258ec01eb7c792d35bbbf538c723172bce23212c From 3250a3a6ace0aa40d25a16a77fb32c08d07afc13 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 2 Sep 2022 18:48:24 +0000 Subject: [PATCH 127/159] chore(python): exclude setup.py in renovate config (#217) Source-Link: https://github.com/googleapis/synthtool/commit/56da63e80c384a871356d1ea6640802017f213b4 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:993a058718e84a82fda04c3177e58f0a43281a996c7c395e0a56ccc4d6d210d7 --- 
.github/.OwlBot.lock.yaml | 2 +- renovate.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 2fa0f7c..b8dcb4a 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:94961fdc5c9ca6d13530a6a414a49d2f607203168215d074cdb0a1df9ec31c0b + digest: sha256:993a058718e84a82fda04c3177e58f0a43281a996c7c395e0a56ccc4d6d210d7 diff --git a/renovate.json b/renovate.json index 566a70f..39b2a0e 100644 --- a/renovate.json +++ b/renovate.json @@ -5,7 +5,7 @@ ":preserveSemverRanges", ":disableDependencyDashboard" ], - "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt"], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], "pip_requirements": { "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] } From d84c0b04ba586dceb7a4a9474a256c64e8e103ae Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 7 Sep 2022 14:22:31 +0000 Subject: [PATCH 128/159] chore: Bump gapic-generator-python version to 1.3.0 (#218) - [ ] Regenerate this pull request now. 
PiperOrigin-RevId: 472561635 Source-Link: https://github.com/googleapis/googleapis/commit/332ecf599f8e747d8d1213b77ae7db26eff12814 Source-Link: https://github.com/googleapis/googleapis-gen/commit/4313d682880fd9d7247291164d4e9d3d5bd9f177 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDMxM2Q2ODI4ODBmZDlkNzI0NzI5MTE2NGQ0ZTlkM2Q1YmQ5ZjE3NyJ9 --- .../services/cloud_memcache/async_client.py | 49 +++ .../services/cloud_memcache/client.py | 49 +++ .../services/cloud_memcache/async_client.py | 56 +++ .../services/cloud_memcache/client.py | 56 +++ mypy.ini | 2 +- ...d_cloud_memcache_apply_parameters_async.py | 7 + ...ed_cloud_memcache_apply_parameters_sync.py | 7 + ...ed_cloud_memcache_create_instance_async.py | 7 + ...ted_cloud_memcache_create_instance_sync.py | 7 + ...ed_cloud_memcache_delete_instance_async.py | 7 + ...ted_cloud_memcache_delete_instance_sync.py | 7 + ...rated_cloud_memcache_get_instance_async.py | 7 + ...erated_cloud_memcache_get_instance_sync.py | 7 + ...ted_cloud_memcache_list_instances_async.py | 7 + ...ated_cloud_memcache_list_instances_sync.py | 7 + ...ed_cloud_memcache_update_instance_async.py | 7 + ...ted_cloud_memcache_update_instance_sync.py | 7 + ..._cloud_memcache_update_parameters_async.py | 7 + ...d_cloud_memcache_update_parameters_sync.py | 7 + ...d_cloud_memcache_apply_parameters_async.py | 7 + ...ed_cloud_memcache_apply_parameters_sync.py | 7 + ...ud_memcache_apply_software_update_async.py | 7 + ...oud_memcache_apply_software_update_sync.py | 7 + ...ed_cloud_memcache_create_instance_async.py | 7 + ...ted_cloud_memcache_create_instance_sync.py | 7 + ...ed_cloud_memcache_delete_instance_async.py | 7 + ...ted_cloud_memcache_delete_instance_sync.py | 7 + ...rated_cloud_memcache_get_instance_async.py | 7 + ...erated_cloud_memcache_get_instance_sync.py | 7 + ...ted_cloud_memcache_list_instances_async.py | 7 + ...ated_cloud_memcache_list_instances_sync.py | 7 + ...ed_cloud_memcache_update_instance_async.py | 7 + 
...ted_cloud_memcache_update_instance_sync.py | 7 + ..._cloud_memcache_update_parameters_async.py | 7 + ...d_cloud_memcache_update_parameters_sync.py | 7 + .../snippet_metadata_memcache_v1.json | 280 +++++++-------- .../snippet_metadata_memcache_v1beta2.json | 320 +++++++++--------- 37 files changed, 721 insertions(+), 301 deletions(-) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py index 9832f7f..b848c77 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py @@ -241,6 +241,13 @@ async def list_instances( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1 async def sample_list_instances(): @@ -351,6 +358,13 @@ async def get_instance( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1 async def sample_get_instance(): @@ -448,6 +462,13 @@ async def create_instance( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1 async def sample_create_instance(): @@ -595,6 +616,13 @@ async def update_instance( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1 async def sample_update_instance(): @@ -727,6 +755,13 @@ async def update_parameters( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1 async def sample_update_parameters(): @@ -853,6 +888,13 @@ async def delete_instance( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1 async def sample_delete_instance(): @@ -976,6 +1018,13 @@ async def apply_parameters( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1 async def sample_apply_parameters(): diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py index e467d54..b182f15 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -464,6 +464,13 @@ def list_instances( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1 def sample_list_instances(): @@ -574,6 +581,13 @@ def get_instance( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1 def sample_get_instance(): @@ -671,6 +685,13 @@ def create_instance( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1 def sample_create_instance(): @@ -818,6 +839,13 @@ def update_instance( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1 def sample_update_instance(): @@ -950,6 +978,13 @@ def update_parameters( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1 def sample_update_parameters(): @@ -1076,6 +1111,13 @@ def delete_instance( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1 def sample_delete_instance(): @@ -1199,6 +1241,13 @@ def apply_parameters( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1 def sample_apply_parameters(): diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py index 1c4ac2f..0c5c5cb 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py @@ -241,6 +241,13 @@ async def list_instances( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 async def sample_list_instances(): @@ -351,6 +358,13 @@ async def get_instance( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 async def sample_get_instance(): @@ -448,6 +462,13 @@ async def create_instance( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 async def sample_create_instance(): @@ -596,6 +617,13 @@ async def update_instance( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 async def sample_update_instance(): @@ -729,6 +757,13 @@ async def update_parameters( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 async def sample_update_parameters(): @@ -856,6 +891,13 @@ async def delete_instance( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 async def sample_delete_instance(): @@ -979,6 +1021,13 @@ async def apply_parameters( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 async def sample_apply_parameters(): @@ -1113,6 +1162,13 @@ async def apply_software_update( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 async def sample_apply_software_update(): diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index 879fd77..83d9691 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -464,6 +464,13 @@ def list_instances( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 def sample_list_instances(): @@ -574,6 +581,13 @@ def get_instance( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 def sample_get_instance(): @@ -671,6 +685,13 @@ def create_instance( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 def sample_create_instance(): @@ -819,6 +840,13 @@ def update_instance( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 def sample_update_instance(): @@ -952,6 +980,13 @@ def update_parameters( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 def sample_update_parameters(): @@ -1079,6 +1114,13 @@ def delete_instance( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 def sample_delete_instance(): @@ -1202,6 +1244,13 @@ def apply_parameters( .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 def sample_apply_parameters(): @@ -1336,6 +1385,13 @@ def apply_software_update( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 def sample_apply_software_update(): diff --git a/mypy.ini b/mypy.ini index 4505b48..574c5ae 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,3 +1,3 @@ [mypy] -python_version = 3.6 +python_version = 3.7 namespace_packages = True diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_async.py index dccf400..0bfffad 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_async.py @@ -24,6 +24,13 @@ # [START memcache_v1_generated_CloudMemcache_ApplyParameters_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1 diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_sync.py index f6a6bd9..4856300 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_sync.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_sync.py @@ -24,6 +24,13 @@ # [START memcache_v1_generated_CloudMemcache_ApplyParameters_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1 diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_async.py index 44f2f0b..58b8f08 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_async.py @@ -24,6 +24,13 @@ # [START memcache_v1_generated_CloudMemcache_CreateInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1 diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_sync.py index 3be4489..b3e3779 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_sync.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_sync.py @@ -24,6 +24,13 @@ # [START memcache_v1_generated_CloudMemcache_CreateInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1 diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_async.py index 754d26f..b6a4f6c 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_async.py @@ -24,6 +24,13 @@ # [START memcache_v1_generated_CloudMemcache_DeleteInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1 diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_sync.py index 767510e..07a52fb 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_sync.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_sync.py @@ -24,6 +24,13 @@ # [START memcache_v1_generated_CloudMemcache_DeleteInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1 diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_async.py index f562215..83a14bb 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_async.py @@ -24,6 +24,13 @@ # [START memcache_v1_generated_CloudMemcache_GetInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1 diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_sync.py index c470181..968c860 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_sync.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_sync.py @@ -24,6 +24,13 @@ # [START memcache_v1_generated_CloudMemcache_GetInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1 diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_async.py index 7516120..0b03e65 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_async.py @@ -24,6 +24,13 @@ # [START memcache_v1_generated_CloudMemcache_ListInstances_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1 diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_sync.py index c5ee139..33af66e 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_sync.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_sync.py @@ -24,6 +24,13 @@ # [START memcache_v1_generated_CloudMemcache_ListInstances_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1 diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_async.py index e078aeb..7db284c 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_async.py @@ -24,6 +24,13 @@ # [START memcache_v1_generated_CloudMemcache_UpdateInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1 diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_sync.py index 711c064..bfe1965 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_sync.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_sync.py @@ -24,6 +24,13 @@ # [START memcache_v1_generated_CloudMemcache_UpdateInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1 diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_async.py index 9020743..231298f 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_async.py @@ -24,6 +24,13 @@ # [START memcache_v1_generated_CloudMemcache_UpdateParameters_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1 diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_sync.py index 4044531..c78b667 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_sync.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_sync.py @@ -24,6 +24,13 @@ # [START memcache_v1_generated_CloudMemcache_UpdateParameters_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1 diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_async.py index eef58f6..3504b1b 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_async.py @@ -24,6 +24,13 @@ # [START memcache_v1beta2_generated_CloudMemcache_ApplyParameters_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_sync.py index 159a37c..5e38aaf 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_sync.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_sync.py @@ -24,6 +24,13 @@ # [START memcache_v1beta2_generated_CloudMemcache_ApplyParameters_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_async.py index 7cdcda9..0dc515a 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_async.py @@ -24,6 +24,13 @@ # [START memcache_v1beta2_generated_CloudMemcache_ApplySoftwareUpdate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_sync.py index f3d7fef..1a06e32 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_sync.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_sync.py @@ -24,6 +24,13 @@ # [START memcache_v1beta2_generated_CloudMemcache_ApplySoftwareUpdate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_async.py index aa17117..ca5e8de 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_async.py @@ -24,6 +24,13 @@ # [START memcache_v1beta2_generated_CloudMemcache_CreateInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_sync.py index f1ec09e..68d4a3b 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_sync.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_sync.py @@ -24,6 +24,13 @@ # [START memcache_v1beta2_generated_CloudMemcache_CreateInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_async.py index f7a715b..3ee0113 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_async.py @@ -24,6 +24,13 @@ # [START memcache_v1beta2_generated_CloudMemcache_DeleteInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_sync.py index 497447a..af3c4b3 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_sync.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_sync.py @@ -24,6 +24,13 @@ # [START memcache_v1beta2_generated_CloudMemcache_DeleteInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_async.py index a17c364..16fbc6d 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_async.py @@ -24,6 +24,13 @@ # [START memcache_v1beta2_generated_CloudMemcache_GetInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_sync.py index aa391b1..b0ff048 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_sync.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_sync.py @@ -24,6 +24,13 @@ # [START memcache_v1beta2_generated_CloudMemcache_GetInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_async.py index 860b2f0..e2feb2d 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_async.py @@ -24,6 +24,13 @@ # [START memcache_v1beta2_generated_CloudMemcache_ListInstances_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_sync.py index a4d39a4..96eec1b 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_sync.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_sync.py @@ -24,6 +24,13 @@ # [START memcache_v1beta2_generated_CloudMemcache_ListInstances_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_async.py index a7d90c8..80ce316 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_async.py @@ -24,6 +24,13 @@ # [START memcache_v1beta2_generated_CloudMemcache_UpdateInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_sync.py index e3e4e0c..f1e71df 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_sync.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_sync.py @@ -24,6 +24,13 @@ # [START memcache_v1beta2_generated_CloudMemcache_UpdateInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_async.py index 6ddba29..3e50251 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_async.py @@ -24,6 +24,13 @@ # [START memcache_v1beta2_generated_CloudMemcache_UpdateParameters_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_sync.py index bea149a..00013aa 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_sync.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_sync.py @@ -24,6 +24,13 @@ # [START memcache_v1beta2_generated_CloudMemcache_UpdateParameters_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import memcache_v1beta2 diff --git a/samples/generated_samples/snippet_metadata_memcache_v1.json b/samples/generated_samples/snippet_metadata_memcache_v1.json index 754496a..52b01b4 100644 --- a/samples/generated_samples/snippet_metadata_memcache_v1.json +++ b/samples/generated_samples/snippet_metadata_memcache_v1.json @@ -67,33 +67,33 @@ "regionTag": "memcache_v1_generated_CloudMemcache_ApplyParameters_async", "segments": [ { - "end": 48, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 55, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 39, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 
46, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], @@ -155,33 +155,33 @@ "regionTag": "memcache_v1_generated_CloudMemcache_ApplyParameters_sync", "segments": [ { - "end": 48, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 55, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 39, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], @@ -244,33 +244,33 @@ "regionTag": "memcache_v1_generated_CloudMemcache_CreateInstance_async", "segments": [ { - "end": 56, + "end": 63, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 63, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 46, - "start": 34, + "end": 53, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 60, + "start": 54, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 64, + "start": 61, "type": "RESPONSE_HANDLING" } ], @@ -332,33 +332,33 @@ "regionTag": "memcache_v1_generated_CloudMemcache_CreateInstance_sync", "segments": [ { - "end": 56, + "end": 63, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 63, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 46, - "start": 34, + "end": 53, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 60, + "start": 54, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 64, + "start": 61, "type": "RESPONSE_HANDLING" } ], @@ -413,33 +413,33 @@ "regionTag": "memcache_v1_generated_CloudMemcache_DeleteInstance_async", "segments": [ { - "end": 48, + "end": 55, "start": 27, 
"type": "FULL" }, { - "end": 48, + "end": 55, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 39, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], @@ -493,33 +493,33 @@ "regionTag": "memcache_v1_generated_CloudMemcache_DeleteInstance_sync", "segments": [ { - "end": 48, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 55, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 39, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], @@ -574,33 +574,33 @@ "regionTag": "memcache_v1_generated_CloudMemcache_GetInstance_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -654,33 +654,33 @@ "regionTag": "memcache_v1_generated_CloudMemcache_GetInstance_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + 
"start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -735,33 +735,33 @@ "regionTag": "memcache_v1_generated_CloudMemcache_ListInstances_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -815,33 +815,33 @@ "regionTag": "memcache_v1_generated_CloudMemcache_ListInstances_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -900,33 +900,33 @@ "regionTag": "memcache_v1_generated_CloudMemcache_UpdateInstance_async", "segments": [ { - "end": 54, + "end": 61, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 61, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 44, - "start": 34, + "end": 51, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 58, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 62, + "start": 59, "type": 
"RESPONSE_HANDLING" } ], @@ -984,33 +984,33 @@ "regionTag": "memcache_v1_generated_CloudMemcache_UpdateInstance_sync", "segments": [ { - "end": 54, + "end": 61, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 61, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 44, - "start": 34, + "end": 51, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 58, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 62, + "start": 59, "type": "RESPONSE_HANDLING" } ], @@ -1073,33 +1073,33 @@ "regionTag": "memcache_v1_generated_CloudMemcache_UpdateParameters_async", "segments": [ { - "end": 48, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 55, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 39, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], @@ -1161,33 +1161,33 @@ "regionTag": "memcache_v1_generated_CloudMemcache_UpdateParameters_sync", "segments": [ { - "end": 48, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 55, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 39, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], diff --git a/samples/generated_samples/snippet_metadata_memcache_v1beta2.json b/samples/generated_samples/snippet_metadata_memcache_v1beta2.json index d2dd1a7..e67f9f6 100644 --- 
a/samples/generated_samples/snippet_metadata_memcache_v1beta2.json +++ b/samples/generated_samples/snippet_metadata_memcache_v1beta2.json @@ -67,33 +67,33 @@ "regionTag": "memcache_v1beta2_generated_CloudMemcache_ApplyParameters_async", "segments": [ { - "end": 48, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 55, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 39, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], @@ -155,33 +155,33 @@ "regionTag": "memcache_v1beta2_generated_CloudMemcache_ApplyParameters_sync", "segments": [ { - "end": 48, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 55, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 39, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], @@ -244,33 +244,33 @@ "regionTag": "memcache_v1beta2_generated_CloudMemcache_ApplySoftwareUpdate_async", "segments": [ { - "end": 48, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 55, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 39, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], @@ -332,33 +332,33 @@ "regionTag": 
"memcache_v1beta2_generated_CloudMemcache_ApplySoftwareUpdate_sync", "segments": [ { - "end": 48, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 55, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 39, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], @@ -421,33 +421,33 @@ "regionTag": "memcache_v1beta2_generated_CloudMemcache_CreateInstance_async", "segments": [ { - "end": 56, + "end": 63, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 63, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 46, - "start": 34, + "end": 53, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 60, + "start": 54, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 64, + "start": 61, "type": "RESPONSE_HANDLING" } ], @@ -509,33 +509,33 @@ "regionTag": "memcache_v1beta2_generated_CloudMemcache_CreateInstance_sync", "segments": [ { - "end": 56, + "end": 63, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 63, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 46, - "start": 34, + "end": 53, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 60, + "start": 54, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 64, + "start": 61, "type": "RESPONSE_HANDLING" } ], @@ -590,33 +590,33 @@ "regionTag": "memcache_v1beta2_generated_CloudMemcache_DeleteInstance_async", "segments": [ { - "end": 48, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 55, "start": 27, "type": "SHORT" 
}, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 39, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], @@ -670,33 +670,33 @@ "regionTag": "memcache_v1beta2_generated_CloudMemcache_DeleteInstance_sync", "segments": [ { - "end": 48, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 55, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 39, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], @@ -751,33 +751,33 @@ "regionTag": "memcache_v1beta2_generated_CloudMemcache_GetInstance_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -831,33 +831,33 @@ "regionTag": "memcache_v1beta2_generated_CloudMemcache_GetInstance_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - 
"start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -912,33 +912,33 @@ "regionTag": "memcache_v1beta2_generated_CloudMemcache_ListInstances_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -992,33 +992,33 @@ "regionTag": "memcache_v1beta2_generated_CloudMemcache_ListInstances_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1077,33 +1077,33 @@ "regionTag": "memcache_v1beta2_generated_CloudMemcache_UpdateInstance_async", "segments": [ { - "end": 54, + "end": 61, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 61, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 44, - "start": 34, + "end": 51, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 58, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 62, + "start": 59, "type": "RESPONSE_HANDLING" } ], @@ -1161,33 +1161,33 @@ "regionTag": 
"memcache_v1beta2_generated_CloudMemcache_UpdateInstance_sync", "segments": [ { - "end": 54, + "end": 61, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 61, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 44, - "start": 34, + "end": 51, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 58, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 62, + "start": 59, "type": "RESPONSE_HANDLING" } ], @@ -1250,33 +1250,33 @@ "regionTag": "memcache_v1beta2_generated_CloudMemcache_UpdateParameters_async", "segments": [ { - "end": 48, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 55, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 39, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], @@ -1338,33 +1338,33 @@ "regionTag": "memcache_v1beta2_generated_CloudMemcache_UpdateParameters_sync", "segments": [ { - "end": 48, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 55, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 39, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], From 41c940a0915f505fc5039a10b914bd4ce89e48bb Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 8 Sep 2022 13:54:34 +0000 Subject: [PATCH 129/159] chore: use 
gapic-generator-python 1.3.1 (#219) - [ ] Regenerate this pull request now. PiperOrigin-RevId: 472772457 Source-Link: https://github.com/googleapis/googleapis/commit/855b74d203deeb0f7a0215f9454cdde62a1f9b86 Source-Link: https://github.com/googleapis/googleapis-gen/commit/b64b1e7da3e138f15ca361552ef0545e54891b4f Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjY0YjFlN2RhM2UxMzhmMTVjYTM2MTU1MmVmMDU0NWU1NDg5MWI0ZiJ9 --- tests/unit/gapic/memcache_v1/test_cloud_memcache.py | 4 ++-- tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py index 59e9441..33cf4e8 100644 --- a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -18,8 +18,8 @@ # try/except added for compatibility with python < 3.8 try: from unittest import mock - from unittest.mock import AsyncMock -except ImportError: + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER import mock import math diff --git a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index 6d67b31..545374c 100644 --- a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -18,8 +18,8 @@ # try/except added for compatibility with python < 3.8 try: from unittest import mock - from unittest.mock import AsyncMock -except ImportError: + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER import mock import math From 857507df4a89a7c83065a7c71d1f00431acc8d63 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 13 Sep 2022 14:38:19 +0000 Subject: [PATCH 130/159] chore: use gapic generator python 1.4.1 (#220) - [ ] Regenerate 
this pull request now. PiperOrigin-RevId: 473833416 Source-Link: https://github.com/googleapis/googleapis/commit/565a5508869557a3228b871101e4e4ebd8f93d11 Source-Link: https://github.com/googleapis/googleapis-gen/commit/1ee1a06c6de3ca8b843572c1fde0548f84236989 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMWVlMWEwNmM2ZGUzY2E4Yjg0MzU3MmMxZmRlMDU0OGY4NDIzNjk4OSJ9 --- tests/unit/gapic/memcache_v1/test_cloud_memcache.py | 1 + tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py | 1 + 2 files changed, 2 insertions(+) diff --git a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py index 33cf4e8..0817bd9 100644 --- a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -45,6 +45,7 @@ from google.protobuf import timestamp_pb2 # type: ignore import grpc from grpc.experimental import aio +from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest diff --git a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index 545374c..c45ad19 100644 --- a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -45,6 +45,7 @@ from google.protobuf import timestamp_pb2 # type: ignore import grpc from grpc.experimental import aio +from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest From 2394f7477a593b9c7271a581f02f8d570160a23d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 29 Sep 2022 15:33:54 -0400 Subject: [PATCH 131/159] fix(deps): require protobuf >= 3.20.2 (#222) * chore: exclude requirements.txt file from renovate-bot Source-Link: https://github.com/googleapis/synthtool/commit/f58d3135a2fab20e225d98741dbc06d57459b816 Post-Processor: 
gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:7a40313731a7cb1454eef6b33d3446ebb121836738dc3ab3d2d3ded5268c35b6 * update constraints files * fix(deps): require protobuf 3.20.2 Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .github/.OwlBot.lock.yaml | 2 +- .kokoro/requirements.txt | 49 ++++++++++++++++++------------------- setup.py | 2 +- testing/constraints-3.7.txt | 2 +- 4 files changed, 27 insertions(+), 28 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index b8dcb4a..3815c98 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:993a058718e84a82fda04c3177e58f0a43281a996c7c395e0a56ccc4d6d210d7 + digest: sha256:7a40313731a7cb1454eef6b33d3446ebb121836738dc3ab3d2d3ded5268c35b6 diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 385f2d4..d15994b 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -325,31 +325,30 @@ platformdirs==2.5.2 \ --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 # via virtualenv -protobuf==3.20.1 \ - --hash=sha256:06059eb6953ff01e56a25cd02cca1a9649a75a7e65397b5b9b4e929ed71d10cf \ - --hash=sha256:097c5d8a9808302fb0da7e20edf0b8d4703274d140fd25c5edabddcde43e081f \ - --hash=sha256:284f86a6207c897542d7e956eb243a36bb8f9564c1742b253462386e96c6b78f \ - --hash=sha256:32ca378605b41fd180dfe4e14d3226386d8d1b002ab31c969c366549e66a2bb7 \ - --hash=sha256:3cc797c9d15d7689ed507b165cd05913acb992d78b379f6014e013f9ecb20996 \ - --hash=sha256:62f1b5c4cd6c5402b4e2d63804ba49a327e0c386c99b1675c8a0fefda23b2067 \ - --hash=sha256:69ccfdf3657ba59569c64295b7d51325f91af586f8d5793b734260dfe2e94e2c \ - --hash=sha256:6f50601512a3d23625d8a85b1638d914a0970f17920ff39cec63aaef80a93fb7 \ - 
--hash=sha256:7403941f6d0992d40161aa8bb23e12575637008a5a02283a930addc0508982f9 \ - --hash=sha256:755f3aee41354ae395e104d62119cb223339a8f3276a0cd009ffabfcdd46bb0c \ - --hash=sha256:77053d28427a29987ca9caf7b72ccafee011257561259faba8dd308fda9a8739 \ - --hash=sha256:7e371f10abe57cee5021797126c93479f59fccc9693dafd6bd5633ab67808a91 \ - --hash=sha256:9016d01c91e8e625141d24ec1b20fed584703e527d28512aa8c8707f105a683c \ - --hash=sha256:9be73ad47579abc26c12024239d3540e6b765182a91dbc88e23658ab71767153 \ - --hash=sha256:adc31566d027f45efe3f44eeb5b1f329da43891634d61c75a5944e9be6dd42c9 \ - --hash=sha256:adfc6cf69c7f8c50fd24c793964eef18f0ac321315439d94945820612849c388 \ - --hash=sha256:af0ebadc74e281a517141daad9d0f2c5d93ab78e9d455113719a45a49da9db4e \ - --hash=sha256:cb29edb9eab15742d791e1025dd7b6a8f6fcb53802ad2f6e3adcb102051063ab \ - --hash=sha256:cd68be2559e2a3b84f517fb029ee611546f7812b1fdd0aa2ecc9bc6ec0e4fdde \ - --hash=sha256:cdee09140e1cd184ba9324ec1df410e7147242b94b5f8b0c64fc89e38a8ba531 \ - --hash=sha256:db977c4ca738dd9ce508557d4fce0f5aebd105e158c725beec86feb1f6bc20d8 \ - --hash=sha256:dd5789b2948ca702c17027c84c2accb552fc30f4622a98ab5c51fcfe8c50d3e7 \ - --hash=sha256:e250a42f15bf9d5b09fe1b293bdba2801cd520a9f5ea2d7fb7536d4441811d20 \ - --hash=sha256:ff8d8fa42675249bb456f5db06c00de6c2f4c27a065955917b28c4f15978b9c3 +protobuf==3.20.2 \ + --hash=sha256:03d76b7bd42ac4a6e109742a4edf81ffe26ffd87c5993126d894fe48a120396a \ + --hash=sha256:09e25909c4297d71d97612f04f41cea8fa8510096864f2835ad2f3b3df5a5559 \ + --hash=sha256:18e34a10ae10d458b027d7638a599c964b030c1739ebd035a1dfc0e22baa3bfe \ + --hash=sha256:291fb4307094bf5ccc29f424b42268640e00d5240bf0d9b86bf3079f7576474d \ + --hash=sha256:2c0b040d0b5d5d207936ca2d02f00f765906622c07d3fa19c23a16a8ca71873f \ + --hash=sha256:384164994727f274cc34b8abd41a9e7e0562801361ee77437099ff6dfedd024b \ + --hash=sha256:3cb608e5a0eb61b8e00fe641d9f0282cd0eedb603be372f91f163cbfbca0ded0 \ + 
--hash=sha256:5d9402bf27d11e37801d1743eada54372f986a372ec9679673bfcc5c60441151 \ + --hash=sha256:712dca319eee507a1e7df3591e639a2b112a2f4a62d40fe7832a16fd19151750 \ + --hash=sha256:7a5037af4e76c975b88c3becdf53922b5ffa3f2cddf657574a4920a3b33b80f3 \ + --hash=sha256:8228e56a865c27163d5d1d1771d94b98194aa6917bcfb6ce139cbfa8e3c27334 \ + --hash=sha256:84a1544252a933ef07bb0b5ef13afe7c36232a774affa673fc3636f7cee1db6c \ + --hash=sha256:84fe5953b18a383fd4495d375fe16e1e55e0a3afe7b4f7b4d01a3a0649fcda9d \ + --hash=sha256:9c673c8bfdf52f903081816b9e0e612186684f4eb4c17eeb729133022d6032e3 \ + --hash=sha256:9f876a69ca55aed879b43c295a328970306e8e80a263ec91cf6e9189243c613b \ + --hash=sha256:a9e5ae5a8e8985c67e8944c23035a0dff2c26b0f5070b2f55b217a1c33bbe8b1 \ + --hash=sha256:b4fdb29c5a7406e3f7ef176b2a7079baa68b5b854f364c21abe327bbeec01cdb \ + --hash=sha256:c184485e0dfba4dfd451c3bd348c2e685d6523543a0f91b9fd4ae90eb09e8422 \ + --hash=sha256:c9cdf251c582c16fd6a9f5e95836c90828d51b0069ad22f463761d27c6c19019 \ + --hash=sha256:e39cf61bb8582bda88cdfebc0db163b774e7e03364bbf9ce1ead13863e81e359 \ + --hash=sha256:e8fbc522303e09036c752a0afcc5c0603e917222d8bedc02813fd73b4b4ed804 \ + --hash=sha256:f34464ab1207114e73bba0794d1257c150a2b89b7a9faf504e00af7c9fd58978 \ + --hash=sha256:f52dabc96ca99ebd2169dadbe018824ebda08a795c7684a0b7d203a290f3adb0 # via # gcp-docuploader # gcp-releasetool diff --git a/setup.py b/setup.py index 02cf890..637776e 100644 --- a/setup.py +++ b/setup.py @@ -27,7 +27,7 @@ dependencies = [ "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", "proto-plus >= 1.22.0, <2.0.0dev", - "protobuf >= 3.19.0, <5.0.0dev", + "protobuf >= 3.20.2, <5.0.0dev", ] url = "https://github.com/googleapis/python-memcache" diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index 810c7cb..df8564d 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -6,4 +6,4 @@ # Then this file should have 
google-cloud-foo==1.14.0 google-api-core==1.32.0 proto-plus==1.22.0 -protobuf==3.19.0 +protobuf==3.20.2 From 29218ae4a53a53ea106a45213c52ce74679a59a2 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 29 Sep 2022 16:02:32 -0400 Subject: [PATCH 132/159] chore(main): release 1.4.3 (#223) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 7 +++++++ setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 67537fc..17a0172 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.4.3](https://github.com/googleapis/python-memcache/compare/v1.4.2...v1.4.3) (2022-09-29) + + +### Bug Fixes + +* **deps:** Require protobuf >= 3.20.2 ([#222](https://github.com/googleapis/python-memcache/issues/222)) ([2394f74](https://github.com/googleapis/python-memcache/commit/2394f7477a593b9c7271a581f02f8d570160a23d)) + ## [1.4.2](https://github.com/googleapis/python-memcache/compare/v1.4.1...v1.4.2) (2022-08-11) diff --git a/setup.py b/setup.py index 637776e..1445b2d 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-memcache" description = "Memorystore for Memcached API client library" -version = "1.4.2" +version = "1.4.3" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", From 90a04d303717f9a3decf88fc7516e788f57c2a2f Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 7 Oct 2022 17:35:14 -0400 Subject: [PATCH 133/159] fix(deps): allow protobuf 3.19.5 (#224) * fix(deps): allow protobuf 3.19.5 * explicitly exclude protobuf 4.21.0 --- setup.py | 2 +- testing/constraints-3.7.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 1445b2d..37dc83a 100644 --- a/setup.py +++ 
b/setup.py @@ -27,7 +27,7 @@ dependencies = [ "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", "proto-plus >= 1.22.0, <2.0.0dev", - "protobuf >= 3.20.2, <5.0.0dev", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] url = "https://github.com/googleapis/python-memcache" diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index df8564d..4005dc5 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -6,4 +6,4 @@ # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.32.0 proto-plus==1.22.0 -protobuf==3.20.2 +protobuf==3.19.5 From ce83880ca90b2e5d4edf859dfe7f47bf5d7e4464 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 10 Oct 2022 12:06:04 -0400 Subject: [PATCH 134/159] chore(main): release 1.4.4 (#225) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 7 +++++++ setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 17a0172..68a6eb4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.4.4](https://github.com/googleapis/python-memcache/compare/v1.4.3...v1.4.4) (2022-10-07) + + +### Bug Fixes + +* **deps:** Allow protobuf 3.19.5 ([#224](https://github.com/googleapis/python-memcache/issues/224)) ([90a04d3](https://github.com/googleapis/python-memcache/commit/90a04d303717f9a3decf88fc7516e788f57c2a2f)) + ## [1.4.3](https://github.com/googleapis/python-memcache/compare/v1.4.2...v1.4.3) (2022-09-29) diff --git a/setup.py b/setup.py index 37dc83a..97ad523 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-memcache" description = "Memorystore for Memcached API client library" -version = "1.4.3" +version = "1.4.4" release_status = "Development Status :: 5 - 
Production/Stable" dependencies = [ "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", From d9fb05dfae4f77651f3f7c370699162c963ddd27 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 8 Nov 2022 02:10:19 +0000 Subject: [PATCH 135/159] chore(python): update dependencies in .kokoro/requirements.txt [autoapprove] (#227) Source-Link: https://togithub.com/googleapis/synthtool/commit/e3a1277ac35fc88c09db1930533e24292b132ced Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:452901c74a22f9b9a3bd02bce780b8e8805c97270d424684bff809ce5be8c2a2 --- .github/.OwlBot.lock.yaml | 2 +- .kokoro/requirements.txt | 325 +++++++++++++++++++++----------------- noxfile.py | 11 +- 3 files changed, 187 insertions(+), 151 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 3815c98..12edee7 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:7a40313731a7cb1454eef6b33d3446ebb121836738dc3ab3d2d3ded5268c35b6 + digest: sha256:452901c74a22f9b9a3bd02bce780b8e8805c97270d424684bff809ce5be8c2a2 diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index d15994b..31425f1 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.2.0 \ --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db # via google-auth -certifi==2022.6.15 \ - --hash=sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d \ - --hash=sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412 +certifi==2022.9.24 \ + --hash=sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14 \ + --hash=sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382 # via requests cffi==1.15.1 \ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ @@ -110,29 +110,33 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==37.0.4 \ - --hash=sha256:190f82f3e87033821828f60787cfa42bff98404483577b591429ed99bed39d59 \ - --hash=sha256:2be53f9f5505673eeda5f2736bea736c40f051a739bfae2f92d18aed1eb54596 \ - --hash=sha256:30788e070800fec9bbcf9faa71ea6d8068f5136f60029759fd8c3efec3c9dcb3 \ - --hash=sha256:3d41b965b3380f10e4611dbae366f6dc3cefc7c9ac4e8842a806b9672ae9add5 \ - --hash=sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab \ - --hash=sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884 \ - --hash=sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82 \ - --hash=sha256:6bc95ed67b6741b2607298f9ea4932ff157e570ef456ef7ff0ef4884a134cc4b \ - 
--hash=sha256:7099a8d55cd49b737ffc99c17de504f2257e3787e02abe6d1a6d136574873441 \ - --hash=sha256:75976c217f10d48a8b5a8de3d70c454c249e4b91851f6838a4e48b8f41eb71aa \ - --hash=sha256:7bc997818309f56c0038a33b8da5c0bfbb3f1f067f315f9abd6fc07ad359398d \ - --hash=sha256:80f49023dd13ba35f7c34072fa17f604d2f19bf0989f292cedf7ab5770b87a0b \ - --hash=sha256:91ce48d35f4e3d3f1d83e29ef4a9267246e6a3be51864a5b7d2247d5086fa99a \ - --hash=sha256:a958c52505c8adf0d3822703078580d2c0456dd1d27fabfb6f76fe63d2971cd6 \ - --hash=sha256:b62439d7cd1222f3da897e9a9fe53bbf5c104fff4d60893ad1355d4c14a24157 \ - --hash=sha256:b7f8dd0d4c1f21759695c05a5ec8536c12f31611541f8904083f3dc582604280 \ - --hash=sha256:d204833f3c8a33bbe11eda63a54b1aad7aa7456ed769a982f21ec599ba5fa282 \ - --hash=sha256:e007f052ed10cc316df59bc90fbb7ff7950d7e2919c9757fd42a2b8ecf8a5f67 \ - --hash=sha256:f2dcb0b3b63afb6df7fd94ec6fbddac81b5492513f7b0436210d390c14d46ee8 \ - --hash=sha256:f721d1885ecae9078c3f6bbe8a88bc0786b6e749bf32ccec1ef2b18929a05046 \ - --hash=sha256:f7a6de3e98771e183645181b3627e2563dcde3ce94a9e42a3f427d2255190327 \ - --hash=sha256:f8c0a6e9e1dd3eb0414ba320f85da6b0dcbd543126e30fcc546e7372a7fbf3b9 +cryptography==38.0.3 \ + --hash=sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d \ + --hash=sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd \ + --hash=sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146 \ + --hash=sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7 \ + --hash=sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436 \ + --hash=sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0 \ + --hash=sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828 \ + --hash=sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b \ + --hash=sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55 \ + 
--hash=sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36 \ + --hash=sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50 \ + --hash=sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2 \ + --hash=sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a \ + --hash=sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8 \ + --hash=sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0 \ + --hash=sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548 \ + --hash=sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320 \ + --hash=sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748 \ + --hash=sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249 \ + --hash=sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959 \ + --hash=sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f \ + --hash=sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0 \ + --hash=sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd \ + --hash=sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220 \ + --hash=sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c \ + --hash=sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722 # via # gcp-releasetool # secretstorage @@ -148,23 +152,23 @@ filelock==3.8.0 \ --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \ --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4 # via virtualenv -gcp-docuploader==0.6.3 \ - --hash=sha256:ba8c9d76b3bbac54b0311c503a373b00edc2dc02d6d54ea9507045adb8e870f7 \ - --hash=sha256:c0f5aaa82ce1854a386197e4e359b120ad6d4e57ae2c812fce42219a3288026b +gcp-docuploader==0.6.4 \ + --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ + 
--hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf # via -r requirements.in -gcp-releasetool==1.8.7 \ - --hash=sha256:3d2a67c9db39322194afb3b427e9cb0476ce8f2a04033695f0aeb63979fc2b37 \ - --hash=sha256:5e4d28f66e90780d77f3ecf1e9155852b0c3b13cbccb08ab07e66b2357c8da8d +gcp-releasetool==1.9.1 \ + --hash=sha256:952f4055d5d986b070ae2a71c4410b250000f9cc5a1e26398fcd55a5bbc5a15f \ + --hash=sha256:d0d3c814a97c1a237517e837d8cfa668ced8df4b882452578ecef4a4e79c583b # via -r requirements.in -google-api-core==2.8.2 \ - --hash=sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc \ - --hash=sha256:93c6a91ccac79079ac6bbf8b74ee75db970cc899278b97d53bc012f35908cf50 +google-api-core==2.10.2 \ + --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ + --hash=sha256:34f24bd1d5f72a8c4519773d99ca6bf080a6c4e041b4e9f024fe230191dda62e # via # google-cloud-core # google-cloud-storage -google-auth==2.11.0 \ - --hash=sha256:be62acaae38d0049c21ca90f27a23847245c9f161ff54ede13af2cb6afecbac9 \ - --hash=sha256:ed65ecf9f681832298e29328e1ef0a3676e3732b2e56f41532d45f70a22de0fb +google-auth==2.14.0 \ + --hash=sha256:1ad5b0e6eba5f69645971abb3d2c197537d5914070a8c6d30299dfdb07c5c700 \ + --hash=sha256:cf24817855d874ede2efd071aa22125445f555de1685b739a9782fcf408c2a3d # via # gcp-releasetool # google-api-core @@ -178,72 +182,97 @@ google-cloud-storage==2.5.0 \ --hash=sha256:19a26c66c317ce542cea0830b7e787e8dac2588b6bfa4d3fd3b871ba16305ab0 \ --hash=sha256:382f34b91de2212e3c2e7b40ec079d27ee2e3dbbae99b75b1bcd8c63063ce235 # via gcp-docuploader -google-crc32c==1.3.0 \ - --hash=sha256:04e7c220798a72fd0f08242bc8d7a05986b2a08a0573396187fd32c1dcdd58b3 \ - --hash=sha256:05340b60bf05b574159e9bd940152a47d38af3fb43803ffe71f11d704b7696a6 \ - --hash=sha256:12674a4c3b56b706153a358eaa1018c4137a5a04635b92b4652440d3d7386206 \ - --hash=sha256:127f9cc3ac41b6a859bd9dc4321097b1a4f6aa7fdf71b4f9227b9e3ebffb4422 \ - 
--hash=sha256:13af315c3a0eec8bb8b8d80b8b128cb3fcd17d7e4edafc39647846345a3f003a \ - --hash=sha256:1926fd8de0acb9d15ee757175ce7242e235482a783cd4ec711cc999fc103c24e \ - --hash=sha256:226f2f9b8e128a6ca6a9af9b9e8384f7b53a801907425c9a292553a3a7218ce0 \ - --hash=sha256:276de6273eb074a35bc598f8efbc00c7869c5cf2e29c90748fccc8c898c244df \ - --hash=sha256:318f73f5484b5671f0c7f5f63741ab020a599504ed81d209b5c7129ee4667407 \ - --hash=sha256:3bbce1be3687bbfebe29abdb7631b83e6b25da3f4e1856a1611eb21854b689ea \ - --hash=sha256:42ae4781333e331a1743445931b08ebdad73e188fd554259e772556fc4937c48 \ - --hash=sha256:58be56ae0529c664cc04a9c76e68bb92b091e0194d6e3c50bea7e0f266f73713 \ - --hash=sha256:5da2c81575cc3ccf05d9830f9e8d3c70954819ca9a63828210498c0774fda1a3 \ - --hash=sha256:6311853aa2bba4064d0c28ca54e7b50c4d48e3de04f6770f6c60ebda1e975267 \ - --hash=sha256:650e2917660e696041ab3dcd7abac160b4121cd9a484c08406f24c5964099829 \ - --hash=sha256:6a4db36f9721fdf391646685ecffa404eb986cbe007a3289499020daf72e88a2 \ - --hash=sha256:779cbf1ce375b96111db98fca913c1f5ec11b1d870e529b1dc7354b2681a8c3a \ - --hash=sha256:7f6fe42536d9dcd3e2ffb9d3053f5d05221ae3bbcefbe472bdf2c71c793e3183 \ - --hash=sha256:891f712ce54e0d631370e1f4997b3f182f3368179198efc30d477c75d1f44942 \ - --hash=sha256:95c68a4b9b7828ba0428f8f7e3109c5d476ca44996ed9a5f8aac6269296e2d59 \ - --hash=sha256:96a8918a78d5d64e07c8ea4ed2bc44354e3f93f46a4866a40e8db934e4c0d74b \ - --hash=sha256:9c3cf890c3c0ecfe1510a452a165431b5831e24160c5fcf2071f0f85ca5a47cd \ - --hash=sha256:9f58099ad7affc0754ae42e6d87443299f15d739b0ce03c76f515153a5cda06c \ - --hash=sha256:a0b9e622c3b2b8d0ce32f77eba617ab0d6768b82836391e4f8f9e2074582bf02 \ - --hash=sha256:a7f9cbea4245ee36190f85fe1814e2d7b1e5f2186381b082f5d59f99b7f11328 \ - --hash=sha256:bab4aebd525218bab4ee615786c4581952eadc16b1ff031813a2fd51f0cc7b08 \ - --hash=sha256:c124b8c8779bf2d35d9b721e52d4adb41c9bfbde45e6a3f25f0820caa9aba73f \ - --hash=sha256:c9da0a39b53d2fab3e5467329ed50e951eb91386e9d0d5b12daf593973c3b168 \ - 
--hash=sha256:ca60076c388728d3b6ac3846842474f4250c91efbfe5afa872d3ffd69dd4b318 \ - --hash=sha256:cb6994fff247987c66a8a4e550ef374671c2b82e3c0d2115e689d21e511a652d \ - --hash=sha256:d1c1d6236feab51200272d79b3d3e0f12cf2cbb12b208c835b175a21efdb0a73 \ - --hash=sha256:dd7760a88a8d3d705ff562aa93f8445ead54f58fd482e4f9e2bafb7e177375d4 \ - --hash=sha256:dda4d8a3bb0b50f540f6ff4b6033f3a74e8bf0bd5320b70fab2c03e512a62812 \ - --hash=sha256:e0f1ff55dde0ebcfbef027edc21f71c205845585fffe30d4ec4979416613e9b3 \ - --hash=sha256:e7a539b9be7b9c00f11ef16b55486141bc2cdb0c54762f84e3c6fc091917436d \ - --hash=sha256:eb0b14523758e37802f27b7f8cd973f5f3d33be7613952c0df904b68c4842f0e \ - --hash=sha256:ed447680ff21c14aaceb6a9f99a5f639f583ccfe4ce1a5e1d48eb41c3d6b3217 \ - --hash=sha256:f52a4ad2568314ee713715b1e2d79ab55fab11e8b304fd1462ff5cccf4264b3e \ - --hash=sha256:fbd60c6aaa07c31d7754edbc2334aef50601b7f1ada67a96eb1eb57c7c72378f \ - --hash=sha256:fc28e0db232c62ca0c3600884933178f0825c99be4474cdd645e378a10588125 \ - --hash=sha256:fe31de3002e7b08eb20823b3735b97c86c5926dd0581c7710a680b418a8709d4 \ - --hash=sha256:fec221a051150eeddfdfcff162e6db92c65ecf46cb0f7bb1bf812a1520ec026b \ - --hash=sha256:ff71073ebf0e42258a42a0b34f2c09ec384977e7f6808999102eedd5b49920e3 +google-crc32c==1.5.0 \ + --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ + --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \ + --hash=sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c \ + --hash=sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289 \ + --hash=sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298 \ + --hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 \ + --hash=sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f \ + --hash=sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2 \ + 
--hash=sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a \ + --hash=sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb \ + --hash=sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210 \ + --hash=sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5 \ + --hash=sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee \ + --hash=sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c \ + --hash=sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a \ + --hash=sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314 \ + --hash=sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd \ + --hash=sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65 \ + --hash=sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37 \ + --hash=sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4 \ + --hash=sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13 \ + --hash=sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894 \ + --hash=sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31 \ + --hash=sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e \ + --hash=sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709 \ + --hash=sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740 \ + --hash=sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc \ + --hash=sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d \ + --hash=sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c \ + --hash=sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c \ + --hash=sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d \ + --hash=sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906 \ + 
--hash=sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61 \ + --hash=sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57 \ + --hash=sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c \ + --hash=sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a \ + --hash=sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438 \ + --hash=sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946 \ + --hash=sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7 \ + --hash=sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96 \ + --hash=sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091 \ + --hash=sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae \ + --hash=sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d \ + --hash=sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88 \ + --hash=sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2 \ + --hash=sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd \ + --hash=sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541 \ + --hash=sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728 \ + --hash=sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178 \ + --hash=sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968 \ + --hash=sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346 \ + --hash=sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8 \ + --hash=sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93 \ + --hash=sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7 \ + --hash=sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273 \ + --hash=sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462 \ + 
--hash=sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94 \ + --hash=sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd \ + --hash=sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e \ + --hash=sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57 \ + --hash=sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b \ + --hash=sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9 \ + --hash=sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a \ + --hash=sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100 \ + --hash=sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325 \ + --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ + --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ + --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 # via google-resumable-media -google-resumable-media==2.3.3 \ - --hash=sha256:27c52620bd364d1c8116eaac4ea2afcbfb81ae9139fb3199652fcac1724bfb6c \ - --hash=sha256:5b52774ea7a829a8cdaa8bd2d4c3d4bc660c91b30857ab2668d0eb830f4ea8c5 +google-resumable-media==2.4.0 \ + --hash=sha256:2aa004c16d295c8f6c33b2b4788ba59d366677c0a25ae7382436cb30f776deaa \ + --hash=sha256:8d5518502f92b9ecc84ac46779bd4f09694ecb3ba38a3e7ca737a86d15cbca1f # via google-cloud-storage googleapis-common-protos==1.56.4 \ --hash=sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394 \ --hash=sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417 # via google-api-core -idna==3.3 \ - --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \ - --hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d +idna==3.4 \ + --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ + 
--hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 # via requests -importlib-metadata==4.12.0 \ - --hash=sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670 \ - --hash=sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23 +importlib-metadata==5.0.0 \ + --hash=sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab \ + --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43 # via # -r requirements.in # twine -jaraco-classes==3.2.2 \ - --hash=sha256:6745f113b0b588239ceb49532aa09c3ebb947433ce311ef2f8e3ad64ebb74594 \ - --hash=sha256:e6ef6fd3fcf4579a7a019d87d1e56a883f4e4c35cfe925f86731abc58804e647 +jaraco-classes==3.2.3 \ + --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ + --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -255,9 +284,9 @@ jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via gcp-releasetool -keyring==23.9.0 \ - --hash=sha256:4c32a31174faaee48f43a7e2c7e9c3216ec5e95acf22a2bebfb4a1d05056ee44 \ - --hash=sha256:98f060ec95ada2ab910c195a2d4317be6ef87936a766b239c46aa3c7aac4f0db +keyring==23.9.3 \ + --hash=sha256:69732a15cb1433bdfbc3b980a8a36a04878a6cfd7cb99f497b573f31618001c0 \ + --hash=sha256:69b01dd83c42f590250fe7a1f503fc229b14de83857314b1933a3ddbf595c4a5 # via # gcp-releasetool # twine @@ -303,9 +332,9 @@ markupsafe==2.1.1 \ --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 # via jinja2 -more-itertools==8.14.0 \ - --hash=sha256:1bc4f91ee5b1b31ac7ceacc17c09befe6a40a503907baf9c839c229b5095cfd2 \ - 
--hash=sha256:c09443cd3d5438b8dafccd867a6bc1cb0894389e90cb53d227456b0b0bccb750 +more-itertools==9.0.0 \ + --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ + --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab # via jaraco-classes nox==2022.8.7 \ --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ @@ -325,34 +354,34 @@ platformdirs==2.5.2 \ --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 # via virtualenv -protobuf==3.20.2 \ - --hash=sha256:03d76b7bd42ac4a6e109742a4edf81ffe26ffd87c5993126d894fe48a120396a \ - --hash=sha256:09e25909c4297d71d97612f04f41cea8fa8510096864f2835ad2f3b3df5a5559 \ - --hash=sha256:18e34a10ae10d458b027d7638a599c964b030c1739ebd035a1dfc0e22baa3bfe \ - --hash=sha256:291fb4307094bf5ccc29f424b42268640e00d5240bf0d9b86bf3079f7576474d \ - --hash=sha256:2c0b040d0b5d5d207936ca2d02f00f765906622c07d3fa19c23a16a8ca71873f \ - --hash=sha256:384164994727f274cc34b8abd41a9e7e0562801361ee77437099ff6dfedd024b \ - --hash=sha256:3cb608e5a0eb61b8e00fe641d9f0282cd0eedb603be372f91f163cbfbca0ded0 \ - --hash=sha256:5d9402bf27d11e37801d1743eada54372f986a372ec9679673bfcc5c60441151 \ - --hash=sha256:712dca319eee507a1e7df3591e639a2b112a2f4a62d40fe7832a16fd19151750 \ - --hash=sha256:7a5037af4e76c975b88c3becdf53922b5ffa3f2cddf657574a4920a3b33b80f3 \ - --hash=sha256:8228e56a865c27163d5d1d1771d94b98194aa6917bcfb6ce139cbfa8e3c27334 \ - --hash=sha256:84a1544252a933ef07bb0b5ef13afe7c36232a774affa673fc3636f7cee1db6c \ - --hash=sha256:84fe5953b18a383fd4495d375fe16e1e55e0a3afe7b4f7b4d01a3a0649fcda9d \ - --hash=sha256:9c673c8bfdf52f903081816b9e0e612186684f4eb4c17eeb729133022d6032e3 \ - --hash=sha256:9f876a69ca55aed879b43c295a328970306e8e80a263ec91cf6e9189243c613b \ - --hash=sha256:a9e5ae5a8e8985c67e8944c23035a0dff2c26b0f5070b2f55b217a1c33bbe8b1 \ - 
--hash=sha256:b4fdb29c5a7406e3f7ef176b2a7079baa68b5b854f364c21abe327bbeec01cdb \ - --hash=sha256:c184485e0dfba4dfd451c3bd348c2e685d6523543a0f91b9fd4ae90eb09e8422 \ - --hash=sha256:c9cdf251c582c16fd6a9f5e95836c90828d51b0069ad22f463761d27c6c19019 \ - --hash=sha256:e39cf61bb8582bda88cdfebc0db163b774e7e03364bbf9ce1ead13863e81e359 \ - --hash=sha256:e8fbc522303e09036c752a0afcc5c0603e917222d8bedc02813fd73b4b4ed804 \ - --hash=sha256:f34464ab1207114e73bba0794d1257c150a2b89b7a9faf504e00af7c9fd58978 \ - --hash=sha256:f52dabc96ca99ebd2169dadbe018824ebda08a795c7684a0b7d203a290f3adb0 +protobuf==3.20.3 \ + --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ + --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ + --hash=sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2 \ + --hash=sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b \ + --hash=sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050 \ + --hash=sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9 \ + --hash=sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7 \ + --hash=sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454 \ + --hash=sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480 \ + --hash=sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469 \ + --hash=sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c \ + --hash=sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e \ + --hash=sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db \ + --hash=sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905 \ + --hash=sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b \ + --hash=sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86 \ + 
--hash=sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4 \ + --hash=sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402 \ + --hash=sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7 \ + --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ + --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ + --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee # via # gcp-docuploader # gcp-releasetool # google-api-core + # googleapis-common-protos py==1.11.0 \ --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 @@ -377,9 +406,9 @@ pygments==2.13.0 \ # via # readme-renderer # rich -pyjwt==2.4.0 \ - --hash=sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf \ - --hash=sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba +pyjwt==2.6.0 \ + --hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \ + --hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14 # via gcp-releasetool pyparsing==3.0.9 \ --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ @@ -392,9 +421,9 @@ python-dateutil==2.8.2 \ --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 # via gcp-releasetool -readme-renderer==37.0 \ - --hash=sha256:07b7ea234e03e58f77cc222e206e6abb8f4c0435becce5104794ee591f9301c5 \ - --hash=sha256:9fa416704703e509eeb900696751c908ddeb2011319d93700d8f18baff887a69 +readme-renderer==37.3 \ + --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ + --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 # via twine requests==2.28.1 \ 
--hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ @@ -405,17 +434,17 @@ requests==2.28.1 \ # google-cloud-storage # requests-toolbelt # twine -requests-toolbelt==0.9.1 \ - --hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \ - --hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0 +requests-toolbelt==0.10.1 \ + --hash=sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7 \ + --hash=sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d # via twine rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==12.5.1 \ - --hash=sha256:2eb4e6894cde1e017976d2975ac210ef515d7548bc595ba20e195fb9628acdeb \ - --hash=sha256:63a5c5ce3673d3d5fbbf23cd87e11ab84b6b451436f1b7f19ec54b6bc36ed7ca +rich==12.6.0 \ + --hash=sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e \ + --hash=sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0 # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -437,9 +466,9 @@ twine==4.0.1 \ --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \ --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0 # via -r requirements.in -typing-extensions==4.3.0 \ - --hash=sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02 \ - --hash=sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6 +typing-extensions==4.4.0 \ + --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ + --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via -r requirements.in urllib3==1.26.12 \ --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ @@ -447,9 +476,9 @@ urllib3==1.26.12 \ # via # 
requests # twine -virtualenv==20.16.4 \ - --hash=sha256:014f766e4134d0008dcaa1f95bafa0fb0f575795d07cae50b1bee514185d6782 \ - --hash=sha256:035ed57acce4ac35c82c9d8802202b0e71adac011a511ff650cbcf9635006a22 +virtualenv==20.16.6 \ + --hash=sha256:186ca84254abcbde98180fd17092f9628c5fe742273c02724972a1d8a2035108 \ + --hash=sha256:530b850b523c6449406dfba859d6345e48ef19b8439606c5d74d7d3c9e14d76e # via nox webencodings==0.5.1 \ --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ @@ -459,13 +488,13 @@ wheel==0.37.1 \ --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 # via -r requirements.in -zipp==3.8.1 \ - --hash=sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2 \ - --hash=sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009 +zipp==3.10.0 \ + --hash=sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1 \ + --hash=sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==65.2.0 \ - --hash=sha256:7f4bc85450898a09f76ebf28b72fa25bc7111f6c7d665d514a60bba9c75ef2a9 \ - --hash=sha256:a3ca5857c89f82f5c9410e8508cb32f4872a3bafd4aa7ae122a24ca33bccc750 +setuptools==65.5.0 \ + --hash=sha256:512e5536220e38146176efb833d4a62aa726b7bbff82cfbc8ba9eaa3996e0b17 \ + --hash=sha256:f62ea9da9ed6289bfe868cd6845968a2c854d1427f8548d52cae02a42b4f0356 # via -r requirements.in diff --git a/noxfile.py b/noxfile.py index ffe9f35..29d0200 100644 --- a/noxfile.py +++ b/noxfile.py @@ -278,7 +278,11 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) 
session.run( @@ -301,7 +305,10 @@ def docfx(session): session.install("-e", ".") session.install( - "sphinx==4.0.1", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml" + "sphinx==4.0.1", + "alabaster", + "recommonmark", + "gcp-sphinx-docfx-yaml", ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) From 78ef907a5bc04552135b3e3969897e96a634cf46 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 19 Nov 2022 11:27:24 -0500 Subject: [PATCH 136/159] chore(python): update release script dependencies (#229) Source-Link: https://github.com/googleapis/synthtool/commit/25083af347468dd5f90f69627420f7d452b6c50e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:e6cbd61f1838d9ff6a31436dfc13717f372a7482a82fc1863ca954ec47bff8c8 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 2 +- .github/workflows/docs.yml | 4 +-- .github/workflows/lint.yml | 2 +- .github/workflows/unittest.yml | 2 +- .kokoro/docker/docs/Dockerfile | 12 +++---- .kokoro/requirements.in | 4 ++- .kokoro/requirements.txt | 61 ++++++++++++++++++---------------- noxfile.py | 4 +-- 8 files changed, 48 insertions(+), 43 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 12edee7..3f1ccc0 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:452901c74a22f9b9a3bd02bce780b8e8805c97270d424684bff809ce5be8c2a2 + digest: sha256:e6cbd61f1838d9ff6a31436dfc13717f372a7482a82fc1863ca954ec47bff8c8 diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 7092a13..e97d89e 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -12,7 +12,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.9" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel @@ -28,7 +28,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.9" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index d2aee5b..16d5a9e 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -12,7 +12,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.8" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index 87ade4d..23000c0 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -41,7 +41,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.8" - name: Install coverage run: | python -m pip install --upgrade setuptools pip wheel diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile index 238b87b..f8137d0 100644 --- a/.kokoro/docker/docs/Dockerfile +++ b/.kokoro/docker/docs/Dockerfile @@ -60,16 +60,16 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb -###################### Install python 3.8.11 +###################### Install python 3.9.13 -# Download python 
3.8.11 -RUN wget https://www.python.org/ftp/python/3.8.11/Python-3.8.11.tgz +# Download python 3.9.13 +RUN wget https://www.python.org/ftp/python/3.9.13/Python-3.9.13.tgz # Extract files -RUN tar -xvf Python-3.8.11.tgz +RUN tar -xvf Python-3.9.13.tgz -# Install python 3.8.11 -RUN ./Python-3.8.11/configure --enable-optimizations +# Install python 3.9.13 +RUN ./Python-3.9.13/configure --enable-optimizations RUN make altinstall ###################### Install pip diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in index 7718391..cbd7e77 100644 --- a/.kokoro/requirements.in +++ b/.kokoro/requirements.in @@ -5,4 +5,6 @@ typing-extensions twine wheel setuptools -nox \ No newline at end of file +nox +charset-normalizer<3 +click<8.1.0 diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 31425f1..9c1b9be 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -93,11 +93,14 @@ cffi==1.15.1 \ charset-normalizer==2.1.1 \ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f - # via requests + # via + # -r requirements.in + # requests click==8.0.4 \ --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb # via + # -r requirements.in # gcp-docuploader # gcp-releasetool colorlog==6.7.0 \ @@ -156,9 +159,9 @@ gcp-docuploader==0.6.4 \ --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf # via -r requirements.in -gcp-releasetool==1.9.1 \ - --hash=sha256:952f4055d5d986b070ae2a71c4410b250000f9cc5a1e26398fcd55a5bbc5a15f \ - --hash=sha256:d0d3c814a97c1a237517e837d8cfa668ced8df4b882452578ecef4a4e79c583b +gcp-releasetool==1.10.0 \ + --hash=sha256:72a38ca91b59c24f7e699e9227c90cbe4dd71b789383cb0164b088abae294c83 \ + 
--hash=sha256:8c7c99320208383d4bb2b808c6880eb7a81424afe7cdba3c8d84b25f4f0e097d # via -r requirements.in google-api-core==2.10.2 \ --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ @@ -166,9 +169,9 @@ google-api-core==2.10.2 \ # via # google-cloud-core # google-cloud-storage -google-auth==2.14.0 \ - --hash=sha256:1ad5b0e6eba5f69645971abb3d2c197537d5914070a8c6d30299dfdb07c5c700 \ - --hash=sha256:cf24817855d874ede2efd071aa22125445f555de1685b739a9782fcf408c2a3d +google-auth==2.14.1 \ + --hash=sha256:ccaa901f31ad5cbb562615eb8b664b3dd0bf5404a67618e642307f00613eda4d \ + --hash=sha256:f5d8701633bebc12e0deea4df8abd8aff31c28b355360597f7f2ee60f2e4d016 # via # gcp-releasetool # google-api-core @@ -178,9 +181,9 @@ google-cloud-core==2.3.2 \ --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \ --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a # via google-cloud-storage -google-cloud-storage==2.5.0 \ - --hash=sha256:19a26c66c317ce542cea0830b7e787e8dac2588b6bfa4d3fd3b871ba16305ab0 \ - --hash=sha256:382f34b91de2212e3c2e7b40ec079d27ee2e3dbbae99b75b1bcd8c63063ce235 +google-cloud-storage==2.6.0 \ + --hash=sha256:104ca28ae61243b637f2f01455cc8a05e8f15a2a18ced96cb587241cdd3820f5 \ + --hash=sha256:4ad0415ff61abdd8bb2ae81c1f8f7ec7d91a1011613f2db87c614c550f97bfe9 # via gcp-docuploader google-crc32c==1.5.0 \ --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ @@ -256,9 +259,9 @@ google-resumable-media==2.4.0 \ --hash=sha256:2aa004c16d295c8f6c33b2b4788ba59d366677c0a25ae7382436cb30f776deaa \ --hash=sha256:8d5518502f92b9ecc84ac46779bd4f09694ecb3ba38a3e7ca737a86d15cbca1f # via google-cloud-storage -googleapis-common-protos==1.56.4 \ - --hash=sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394 \ - --hash=sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417 +googleapis-common-protos==1.57.0 \ + 
--hash=sha256:27a849d6205838fb6cc3c1c21cb9800707a661bb21c6ce7fb13e99eb1f8a0c46 \ + --hash=sha256:a9f4a1d7f6d9809657b7f1316a1aa527f6664891531bcfcc13b6696e685f443c # via google-api-core idna==3.4 \ --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ @@ -269,6 +272,7 @@ importlib-metadata==5.0.0 \ --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43 # via # -r requirements.in + # keyring # twine jaraco-classes==3.2.3 \ --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ @@ -284,9 +288,9 @@ jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via gcp-releasetool -keyring==23.9.3 \ - --hash=sha256:69732a15cb1433bdfbc3b980a8a36a04878a6cfd7cb99f497b573f31618001c0 \ - --hash=sha256:69b01dd83c42f590250fe7a1f503fc229b14de83857314b1933a3ddbf595c4a5 +keyring==23.11.0 \ + --hash=sha256:3dd30011d555f1345dec2c262f0153f2f0ca6bca041fb1dc4588349bb4c0ac1e \ + --hash=sha256:ad192263e2cdd5f12875dedc2da13534359a7e760e77f8d04b50968a821c2361 # via # gcp-releasetool # twine @@ -350,9 +354,9 @@ pkginfo==1.8.3 \ --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \ --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c # via twine -platformdirs==2.5.2 \ - --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ - --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 +platformdirs==2.5.4 \ + --hash=sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7 \ + --hash=sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10 # via virtualenv protobuf==3.20.3 \ --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ @@ -381,7 +385,6 @@ protobuf==3.20.3 \ # gcp-docuploader # gcp-releasetool # google-api-core - # googleapis-common-protos 
py==1.11.0 \ --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 @@ -476,17 +479,17 @@ urllib3==1.26.12 \ # via # requests # twine -virtualenv==20.16.6 \ - --hash=sha256:186ca84254abcbde98180fd17092f9628c5fe742273c02724972a1d8a2035108 \ - --hash=sha256:530b850b523c6449406dfba859d6345e48ef19b8439606c5d74d7d3c9e14d76e +virtualenv==20.16.7 \ + --hash=sha256:8691e3ff9387f743e00f6bb20f70121f5e4f596cae754531f2b3b3a1b1ac696e \ + --hash=sha256:efd66b00386fdb7dbe4822d172303f40cd05e50e01740b19ea42425cbe653e29 # via nox webencodings==0.5.1 \ --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 # via bleach -wheel==0.37.1 \ - --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ - --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 +wheel==0.38.4 \ + --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ + --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 # via -r requirements.in zipp==3.10.0 \ --hash=sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1 \ @@ -494,7 +497,7 @@ zipp==3.10.0 \ # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==65.5.0 \ - --hash=sha256:512e5536220e38146176efb833d4a62aa726b7bbff82cfbc8ba9eaa3996e0b17 \ - --hash=sha256:f62ea9da9ed6289bfe868cd6845968a2c854d1427f8548d52cae02a42b4f0356 +setuptools==65.5.1 \ + --hash=sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31 \ + --hash=sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f # via -r requirements.in diff --git a/noxfile.py b/noxfile.py index 29d0200..d8440c0 100644 --- a/noxfile.py +++ b/noxfile.py @@ -273,7 +273,7 @@ def cover(session): session.run("coverage", 
"erase") -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python="3.9") def docs(session): """Build the docs for this library.""" @@ -299,7 +299,7 @@ def docs(session): ) -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python="3.9") def docfx(session): """Build the docfx yaml files for this library.""" From c9c771af7c188c8c3ce66113b41a475d290aa6c2 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 26 Nov 2022 18:23:54 -0500 Subject: [PATCH 137/159] feat: maintenance schedules (#226) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update to gapic-generator-python 1.5.0 feat: add support for `google.cloud..__version__` PiperOrigin-RevId: 484665853 Source-Link: https://github.com/googleapis/googleapis/commit/8eb249a19db926c2fbc4ecf1dc09c0e521a88b22 Source-Link: https://github.com/googleapis/googleapis-gen/commit/c8aa327b5f478865fc3fd91e3c2768e54e26ad44 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYzhhYTMyN2I1ZjQ3ODg2NWZjM2ZkOTFlM2MyNzY4ZTU0ZTI2YWQ0NCJ9 * 🩉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * update version in gapic_version.py * add .release-please-manifest.json with correct version * add owlbot.py to exclude generated gapic_version.py * set manifest to true in .github/release-please.yml * add release-please-config.json * chore: Update to gapic-generator-python 1.6.0 feat(python): Add typing to proto.Message based class attributes feat(python): Snippetgen handling of repeated enum field PiperOrigin-RevId: 487326846 Source-Link: https://github.com/googleapis/googleapis/commit/da380c77bb87ba0f752baf07605dd1db30e1f7e1 Source-Link: https://github.com/googleapis/googleapis-gen/commit/61ef5762ee6731a0cbbfea22fd0eecee51ab1c8e Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNjFlZjU3NjJlZTY3MzFhMGNiYmZlYTIyZmQwZWVjZWU1MWFiMWM4ZSJ9 * 🩉 
Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: new APIs added to reflect updates to the filestore service - Add ENTERPRISE Tier - Add snapshot APIs: RevertInstance, ListSnapshots, CreateSnapshot, DeleteSnapshot, UpdateSnapshot - Add multi-share APIs: ListShares, GetShare, CreateShare, DeleteShare, UpdateShare - Add ConnectMode to NetworkConfig (for Private Service Access support) - New status codes (SUSPENDED/SUSPENDING, REVERTING/RESUMING) - Add SuspensionReason (for KMS related suspension) - Add new fields to Instance information: max_capacity_gb, capacity_step_size_gb, max_share_count, capacity_gb, multi_share_enabled PiperOrigin-RevId: 487492758 Source-Link: https://github.com/googleapis/googleapis/commit/5be5981f50322cf0c7388595e0f31ac5d0693469 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ab0e217f560cc2c1afc11441c2eab6b6950efd2b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYWIwZTIxN2Y1NjBjYzJjMWFmYzExNDQxYzJlYWI2YjY5NTBlZmQyYiJ9 * 🩉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * update path to snippet metadata json * chore: Update gapic-generator-python to v1.6.1 PiperOrigin-RevId: 488036204 Source-Link: https://github.com/googleapis/googleapis/commit/08f275f5c1c0d99056e1cb68376323414459ee19 Source-Link: https://github.com/googleapis/googleapis-gen/commit/555c0945e60649e38739ae64bc45719cdf72178f Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTU1YzA5NDVlNjA2NDllMzg3MzlhZTY0YmM0NTcxOWNkZjcyMTc4ZiJ9 * 🩉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: maintenance schedules PiperOrigin-RevId: 489029238 Source-Link: https://github.com/googleapis/googleapis/commit/7f891be94526fd0c9f75c85f7e89c092a731520f Source-Link: 
https://github.com/googleapis/googleapis-gen/commit/e318ac6bb94eeeee750c1e830439ee96f54dc60a Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZTMxOGFjNmJiOTRlZWVlZTc1MGMxZTgzMDQzOWVlOTZmNTRkYzYwYSJ9 * 🩉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .github/release-please.yml | 1 + .release-please-manifest.json | 3 + docs/memcache_v1/types.rst | 1 - docs/memcache_v1beta2/types.rst | 1 - google/cloud/memcache/__init__.py | 16 + google/cloud/memcache/gapic_version.py | 16 + google/cloud/memcache_v1/__init__.py | 16 + google/cloud/memcache_v1/gapic_metadata.json | 10 + .../services/cloud_memcache/async_client.py | 608 ++++++++- .../services/cloud_memcache/client.py | 600 ++++++++- .../cloud_memcache/transports/base.py | 71 +- .../cloud_memcache/transports/grpc.py | 169 ++- .../cloud_memcache/transports/grpc_asyncio.py | 166 ++- google/cloud/memcache_v1/types/__init__.py | 12 + .../cloud/memcache_v1/types/cloud_memcache.py | 409 ++++-- google/cloud/memcache_v1beta2/__init__.py | 12 + .../memcache_v1beta2/gapic_metadata.json | 10 + .../services/cloud_memcache/async_client.py | 577 ++++++++- .../services/cloud_memcache/client.py | 567 ++++++++- .../cloud_memcache/transports/base.py | 71 +- .../cloud_memcache/transports/grpc.py | 156 ++- .../cloud_memcache/transports/grpc_asyncio.py | 153 ++- .../cloud/memcache_v1beta2/types/__init__.py | 8 + .../memcache_v1beta2/types/cloud_memcache.py | 337 +++-- owlbot.py | 56 + release-please-config.json | 28 + ...d_memcache_reschedule_maintenance_async.py | 57 + ...ud_memcache_reschedule_maintenance_sync.py | 57 + ...d_memcache_reschedule_maintenance_async.py | 57 + ...ud_memcache_reschedule_maintenance_sync.py | 57 + ...et_metadata_google.cloud.memcache.v1.json} | 184 ++- ...tadata_google.cloud.memcache.v1beta2.json} | 188 ++- scripts/fixup_memcache_v1_keywords.py | 1 + 
scripts/fixup_memcache_v1beta2_keywords.py | 1 + setup.py | 28 +- testing/constraints-3.10.txt | 6 + testing/constraints-3.11.txt | 6 + testing/constraints-3.7.txt | 2 +- testing/constraints-3.8.txt | 8 +- testing/constraints-3.9.txt | 8 +- .../gapic/memcache_v1/test_cloud_memcache.py | 1127 +++++++++++++++++ .../memcache_v1beta2/test_cloud_memcache.py | 1127 +++++++++++++++++ 42 files changed, 6524 insertions(+), 464 deletions(-) create mode 100644 .release-please-manifest.json create mode 100644 google/cloud/memcache/gapic_version.py create mode 100644 owlbot.py create mode 100644 release-please-config.json create mode 100644 samples/generated_samples/memcache_v1_generated_cloud_memcache_reschedule_maintenance_async.py create mode 100644 samples/generated_samples/memcache_v1_generated_cloud_memcache_reschedule_maintenance_sync.py create mode 100644 samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_async.py create mode 100644 samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_sync.py rename samples/generated_samples/{snippet_metadata_memcache_v1.json => snippet_metadata_google.cloud.memcache.v1.json} (86%) rename samples/generated_samples/{snippet_metadata_memcache_v1beta2.json => snippet_metadata_google.cloud.memcache.v1beta2.json} (87%) diff --git a/.github/release-please.yml b/.github/release-please.yml index 6def37a..e9a4f00 100644 --- a/.github/release-please.yml +++ b/.github/release-please.yml @@ -1,5 +1,6 @@ releaseType: python handleGHRelease: true +manifest: true # NOTE: this section is generated by synthtool.languages.python # See https://github.com/googleapis/synthtool/blob/master/synthtool/languages/python.py branches: diff --git a/.release-please-manifest.json b/.release-please-manifest.json new file mode 100644 index 0000000..50f0c45 --- /dev/null +++ b/.release-please-manifest.json @@ -0,0 +1,3 @@ +{ + ".": "1.4.4" +} diff --git a/docs/memcache_v1/types.rst 
b/docs/memcache_v1/types.rst index 06bda21..5892ae0 100644 --- a/docs/memcache_v1/types.rst +++ b/docs/memcache_v1/types.rst @@ -3,5 +3,4 @@ Types for Google Cloud Memcache v1 API .. automodule:: google.cloud.memcache_v1.types :members: - :undoc-members: :show-inheritance: diff --git a/docs/memcache_v1beta2/types.rst b/docs/memcache_v1beta2/types.rst index 19e52f6..1b47aa6 100644 --- a/docs/memcache_v1beta2/types.rst +++ b/docs/memcache_v1beta2/types.rst @@ -3,5 +3,4 @@ Types for Google Cloud Memcache v1beta2 API .. automodule:: google.cloud.memcache_v1beta2.types :members: - :undoc-members: :show-inheritance: diff --git a/google/cloud/memcache/__init__.py b/google/cloud/memcache/__init__.py index 0728147..8865b97 100644 --- a/google/cloud/memcache/__init__.py +++ b/google/cloud/memcache/__init__.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from google.cloud.memcache import gapic_version as package_version + +__version__ = package_version.__version__ + from google.cloud.memcache_v1.services.cloud_memcache.async_client import ( CloudMemcacheAsyncClient, @@ -26,11 +30,17 @@ Instance, ListInstancesRequest, ListInstancesResponse, + LocationMetadata, + MaintenancePolicy, + MaintenanceSchedule, MemcacheParameters, MemcacheVersion, OperationMetadata, + RescheduleMaintenanceRequest, UpdateInstanceRequest, UpdateParametersRequest, + WeeklyMaintenanceWindow, + ZoneMetadata, ) __all__ = ( @@ -43,9 +53,15 @@ "Instance", "ListInstancesRequest", "ListInstancesResponse", + "LocationMetadata", + "MaintenancePolicy", + "MaintenanceSchedule", "MemcacheParameters", "OperationMetadata", + "RescheduleMaintenanceRequest", "UpdateInstanceRequest", "UpdateParametersRequest", + "WeeklyMaintenanceWindow", + "ZoneMetadata", "MemcacheVersion", ) diff --git a/google/cloud/memcache/gapic_version.py b/google/cloud/memcache/gapic_version.py new file mode 100644 index 0000000..25e4dd6 --- /dev/null +++ 
b/google/cloud/memcache/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "1.4.4" # {x-release-please-version} diff --git a/google/cloud/memcache_v1/__init__.py b/google/cloud/memcache_v1/__init__.py index a4dbffd..02ebf81 100644 --- a/google/cloud/memcache_v1/__init__.py +++ b/google/cloud/memcache_v1/__init__.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from google.cloud.memcache import gapic_version as package_version + +__version__ = package_version.__version__ + from .services.cloud_memcache import CloudMemcacheAsyncClient, CloudMemcacheClient from .types.cloud_memcache import ( @@ -23,11 +27,17 @@ Instance, ListInstancesRequest, ListInstancesResponse, + LocationMetadata, + MaintenancePolicy, + MaintenanceSchedule, MemcacheParameters, MemcacheVersion, OperationMetadata, + RescheduleMaintenanceRequest, UpdateInstanceRequest, UpdateParametersRequest, + WeeklyMaintenanceWindow, + ZoneMetadata, ) __all__ = ( @@ -40,9 +50,15 @@ "Instance", "ListInstancesRequest", "ListInstancesResponse", + "LocationMetadata", + "MaintenancePolicy", + "MaintenanceSchedule", "MemcacheParameters", "MemcacheVersion", "OperationMetadata", + "RescheduleMaintenanceRequest", "UpdateInstanceRequest", "UpdateParametersRequest", + "WeeklyMaintenanceWindow", + "ZoneMetadata", ) diff --git a/google/cloud/memcache_v1/gapic_metadata.json b/google/cloud/memcache_v1/gapic_metadata.json index 08d37fa..8c92ac7 100644 --- a/google/cloud/memcache_v1/gapic_metadata.json +++ b/google/cloud/memcache_v1/gapic_metadata.json @@ -35,6 +35,11 @@ "list_instances" ] }, + "RescheduleMaintenance": { + "methods": [ + "reschedule_maintenance" + ] + }, "UpdateInstance": { "methods": [ "update_instance" @@ -75,6 +80,11 @@ "list_instances" ] }, + "RescheduleMaintenance": { + "methods": [ + "reschedule_maintenance" + ] + }, "UpdateInstance": { "methods": [ "update_instance" diff --git a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py index b848c77..6dbb0c9 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py @@ -16,7 +16,17 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, 
+ Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 @@ -33,6 +43,8 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -184,9 +196,9 @@ def transport(self) -> CloudMemcacheTransport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, CloudMemcacheTransport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the cloud memcache client. @@ -230,11 +242,11 @@ def __init__( async def list_instances( self, - request: Union[cloud_memcache.ListInstancesRequest, dict] = None, + request: Optional[Union[cloud_memcache.ListInstancesRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesAsyncPager: r"""Lists Instances in a given location. @@ -267,7 +279,7 @@ async def sample_list_instances(): print(response) Args: - request (Union[google.cloud.memcache_v1.types.ListInstancesRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1.types.ListInstancesRequest, dict]]): The request object. Request for [ListInstances][google.cloud.memcache.v1.CloudMemcache.ListInstances]. 
parent (:class:`str`): @@ -347,11 +359,11 @@ async def sample_list_instances(): async def get_instance( self, - request: Union[cloud_memcache.GetInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.GetInstanceRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> cloud_memcache.Instance: r"""Gets details of a single Instance. @@ -383,7 +395,7 @@ async def sample_get_instance(): print(response) Args: - request (Union[google.cloud.memcache_v1.types.GetInstanceRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1.types.GetInstanceRequest, dict]]): The request object. Request for [GetInstance][google.cloud.memcache.v1.CloudMemcache.GetInstance]. name (:class:`str`): @@ -403,7 +415,7 @@ async def sample_get_instance(): Returns: google.cloud.memcache_v1.types.Instance: - + A Memorystore for Memcached instance """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -449,13 +461,13 @@ async def sample_get_instance(): async def create_instance( self, - request: Union[cloud_memcache.CreateInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.CreateInstanceRequest, dict]] = None, *, - parent: str = None, - instance: cloud_memcache.Instance = None, - instance_id: str = None, + parent: Optional[str] = None, + instance: Optional[cloud_memcache.Instance] = None, + instance_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Creates a new Instance in a given location. 
@@ -499,7 +511,7 @@ async def sample_create_instance(): print(response) Args: - request (Union[google.cloud.memcache_v1.types.CreateInstanceRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1.types.CreateInstanceRequest, dict]]): The request object. Request for [CreateInstance][google.cloud.memcache.v1.CloudMemcache.CreateInstance]. parent (:class:`str`): @@ -525,10 +537,10 @@ async def sample_create_instance(): - Must start with a letter. - Must be between 1-40 characters. - Must end with a number or a letter. - - Must be unique within the user project / location + - Must be unique within the user project / location. - If any of the above are not met, will raise an invalid - argument error. + If any of the above are not met, the API raises an + invalid argument error. This corresponds to the ``instance_id`` field on the ``request`` instance; if ``request`` is provided, this @@ -544,7 +556,8 @@ async def sample_create_instance(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.memcache_v1.types.Instance` + :class:`google.cloud.memcache_v1.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. 
@@ -603,12 +616,12 @@ async def sample_create_instance(): async def update_instance( self, - request: Union[cloud_memcache.UpdateInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.UpdateInstanceRequest, dict]] = None, *, - instance: cloud_memcache.Instance = None, - update_mask: field_mask_pb2.FieldMask = None, + instance: Optional[cloud_memcache.Instance] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Updates an existing Instance in a given project and @@ -651,7 +664,7 @@ async def sample_update_instance(): print(response) Args: - request (Union[google.cloud.memcache_v1.types.UpdateInstanceRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1.types.UpdateInstanceRequest, dict]]): The request object. Request for [UpdateInstance][google.cloud.memcache.v1.CloudMemcache.UpdateInstance]. instance (:class:`google.cloud.memcache_v1.types.Instance`): @@ -680,7 +693,8 @@ async def sample_update_instance(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.memcache_v1.types.Instance` + :class:`google.cloud.memcache_v1.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. 
@@ -739,19 +753,19 @@ async def sample_update_instance(): async def update_parameters( self, - request: Union[cloud_memcache.UpdateParametersRequest, dict] = None, + request: Optional[Union[cloud_memcache.UpdateParametersRequest, dict]] = None, *, - name: str = None, - update_mask: field_mask_pb2.FieldMask = None, - parameters: cloud_memcache.MemcacheParameters = None, + name: Optional[str] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + parameters: Optional[cloud_memcache.MemcacheParameters] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Updates the defined Memcached Parameters for an - existing Instance. This method only stages the - parameters, it must be followed by ApplyParameters to - apply the parameters to nodes of the Memcached Instance. + r"""Updates the defined Memcached parameters for an existing + instance. This method only stages the parameters, it must be + followed by ``ApplyParameters`` to apply the parameters to nodes + of the Memcached instance. .. code-block:: python @@ -784,7 +798,7 @@ async def sample_update_parameters(): print(response) Args: - request (Union[google.cloud.memcache_v1.types.UpdateParametersRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1.types.UpdateParametersRequest, dict]]): The request object. Request for [UpdateParameters][google.cloud.memcache.v1.CloudMemcache.UpdateParameters]. name (:class:`str`): @@ -818,7 +832,8 @@ async def sample_update_parameters(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.memcache_v1.types.Instance` + :class:`google.cloud.memcache_v1.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. 
@@ -877,11 +892,11 @@ async def sample_update_parameters(): async def delete_instance( self, - request: Union[cloud_memcache.DeleteInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.DeleteInstanceRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Deletes a single Instance. @@ -917,7 +932,7 @@ async def sample_delete_instance(): print(response) Args: - request (Union[google.cloud.memcache_v1.types.DeleteInstanceRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1.types.DeleteInstanceRequest, dict]]): The request object. Request for [DeleteInstance][google.cloud.memcache.v1.CloudMemcache.DeleteInstance]. name (:class:`str`): @@ -1003,18 +1018,18 @@ async def sample_delete_instance(): async def apply_parameters( self, - request: Union[cloud_memcache.ApplyParametersRequest, dict] = None, + request: Optional[Union[cloud_memcache.ApplyParametersRequest, dict]] = None, *, - name: str = None, - node_ids: Sequence[str] = None, - apply_all: bool = None, + name: Optional[str] = None, + node_ids: Optional[MutableSequence[str]] = None, + apply_all: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""ApplyParameters will restart the set of specified - nodes in order to update them to the current set of - parameters for the Memcached Instance. + r"""``ApplyParameters`` restarts the set of specified nodes in order + to update them to the current set of parameters for the + Memcached Instance. .. 
code-block:: python @@ -1047,7 +1062,7 @@ async def sample_apply_parameters(): print(response) Args: - request (Union[google.cloud.memcache_v1.types.ApplyParametersRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1.types.ApplyParametersRequest, dict]]): The request object. Request for [ApplyParameters][google.cloud.memcache.v1.CloudMemcache.ApplyParameters]. name (:class:`str`): @@ -1058,20 +1073,18 @@ async def sample_apply_parameters(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - node_ids (:class:`Sequence[str]`): - Nodes to which we should apply the - instance-level parameter group. + node_ids (:class:`MutableSequence[str]`): + Nodes to which the instance-level + parameter group is applied. This corresponds to the ``node_ids`` field on the ``request`` instance; if ``request`` is provided, this should not be set. apply_all (:class:`bool`): - Whether to apply instance-level - parameter group to all nodes. If set to - true, will explicitly restrict users - from specifying any nodes, and apply - parameter group updates to all nodes - within the instance. + Whether to apply instance-level parameter group to all + nodes. If set to true, users are restricted from + specifying individual nodes, and ``ApplyParameters`` + updates all nodes within the instance. This corresponds to the ``apply_all`` field on the ``request`` instance; if ``request`` is provided, this @@ -1087,7 +1100,8 @@ async def sample_apply_parameters(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.memcache_v1.types.Instance` + :class:`google.cloud.memcache_v1.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. @@ -1144,6 +1158,476 @@ async def sample_apply_parameters(): # Done; return the response. 
return response + async def reschedule_maintenance( + self, + request: Optional[ + Union[cloud_memcache.RescheduleMaintenanceRequest, dict] + ] = None, + *, + instance: Optional[str] = None, + reschedule_type: Optional[ + cloud_memcache.RescheduleMaintenanceRequest.RescheduleType + ] = None, + schedule_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Reschedules upcoming maintenance event. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import memcache_v1 + + async def sample_reschedule_maintenance(): + # Create a client + client = memcache_v1.CloudMemcacheAsyncClient() + + # Initialize request argument(s) + request = memcache_v1.RescheduleMaintenanceRequest( + instance="instance_value", + reschedule_type="SPECIFIC_TIME", + ) + + # Make the request + operation = client.reschedule_maintenance(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.memcache_v1.types.RescheduleMaintenanceRequest, dict]]): + The request object. Request for + [RescheduleMaintenance][google.cloud.memcache.v1.CloudMemcache.RescheduleMaintenance]. + instance (:class:`str`): + Required. Memcache instance resource name using the + form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. 
+ + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + reschedule_type (:class:`google.cloud.memcache_v1.types.RescheduleMaintenanceRequest.RescheduleType`): + Required. If reschedule type is SPECIFIC_TIME, must set + up schedule_time as well. + + This corresponds to the ``reschedule_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (:class:`google.protobuf.timestamp_pb2.Timestamp`): + Timestamp when the maintenance shall be rescheduled to + if reschedule_type=SPECIFIC_TIME, in RFC 3339 format, + for example ``2012-11-15T16:19:00.094Z``. + + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memcache_v1.types.Instance` A + Memorystore for Memcached instance + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, reschedule_type, schedule_time]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloud_memcache.RescheduleMaintenanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if instance is not None: + request.instance = instance + if reschedule_type is not None: + request.reschedule_type = reschedule_type + if schedule_time is not None: + request.schedule_time = schedule_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.reschedule_maintenance, + default_timeout=1200.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("instance", request.instance),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def __aenter__(self): return self diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py index b182f15..b27b5a5 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -16,7 +16,18 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -36,6 +47,8 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -62,7 +75,7 @@ class CloudMemcacheClientMeta(type): def get_transport_class( cls, - label: str = None, + label: Optional[str] = None, ) -> Type[CloudMemcacheTransport]: """Returns an appropriate transport class. @@ -356,8 +369,8 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, CloudMemcacheTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + transport: Optional[Union[str, CloudMemcacheTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the cloud memcache client. @@ -371,7 +384,7 @@ def __init__( transport (Union[str, CloudMemcacheTransport]): The transport to use. 
If set to None, a transport is chosen automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -401,6 +414,7 @@ def __init__( client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( client_options @@ -453,11 +467,11 @@ def __init__( def list_instances( self, - request: Union[cloud_memcache.ListInstancesRequest, dict] = None, + request: Optional[Union[cloud_memcache.ListInstancesRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesPager: r"""Lists Instances in a given location. @@ -570,11 +584,11 @@ def sample_list_instances(): def get_instance( self, - request: Union[cloud_memcache.GetInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.GetInstanceRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> cloud_memcache.Instance: r"""Gets details of a single Instance. @@ -626,7 +640,7 @@ def sample_get_instance(): Returns: google.cloud.memcache_v1.types.Instance: - + A Memorystore for Memcached instance """ # Create or coerce a protobuf request object. 
# Quick check: If we got a request object, we should *not* have @@ -672,13 +686,13 @@ def sample_get_instance(): def create_instance( self, - request: Union[cloud_memcache.CreateInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.CreateInstanceRequest, dict]] = None, *, - parent: str = None, - instance: cloud_memcache.Instance = None, - instance_id: str = None, + parent: Optional[str] = None, + instance: Optional[cloud_memcache.Instance] = None, + instance_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Creates a new Instance in a given location. @@ -748,10 +762,10 @@ def sample_create_instance(): - Must start with a letter. - Must be between 1-40 characters. - Must end with a number or a letter. - - Must be unique within the user project / location + - Must be unique within the user project / location. - If any of the above are not met, will raise an invalid - argument error. + If any of the above are not met, the API raises an + invalid argument error. This corresponds to the ``instance_id`` field on the ``request`` instance; if ``request`` is provided, this @@ -767,7 +781,8 @@ def sample_create_instance(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.memcache_v1.types.Instance` + :class:`google.cloud.memcache_v1.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. 
@@ -826,12 +841,12 @@ def sample_create_instance(): def update_instance( self, - request: Union[cloud_memcache.UpdateInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.UpdateInstanceRequest, dict]] = None, *, - instance: cloud_memcache.Instance = None, - update_mask: field_mask_pb2.FieldMask = None, + instance: Optional[cloud_memcache.Instance] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Updates an existing Instance in a given project and @@ -903,7 +918,8 @@ def sample_update_instance(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.memcache_v1.types.Instance` + :class:`google.cloud.memcache_v1.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. @@ -962,19 +978,19 @@ def sample_update_instance(): def update_parameters( self, - request: Union[cloud_memcache.UpdateParametersRequest, dict] = None, + request: Optional[Union[cloud_memcache.UpdateParametersRequest, dict]] = None, *, - name: str = None, - update_mask: field_mask_pb2.FieldMask = None, - parameters: cloud_memcache.MemcacheParameters = None, + name: Optional[str] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + parameters: Optional[cloud_memcache.MemcacheParameters] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Updates the defined Memcached Parameters for an - existing Instance. This method only stages the - parameters, it must be followed by ApplyParameters to - apply the parameters to nodes of the Memcached Instance. + r"""Updates the defined Memcached parameters for an existing + instance. 
This method only stages the parameters, it must be + followed by ``ApplyParameters`` to apply the parameters to nodes + of the Memcached instance. .. code-block:: python @@ -1041,7 +1057,8 @@ def sample_update_parameters(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.memcache_v1.types.Instance` + :class:`google.cloud.memcache_v1.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. @@ -1100,11 +1117,11 @@ def sample_update_parameters(): def delete_instance( self, - request: Union[cloud_memcache.DeleteInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.DeleteInstanceRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Deletes a single Instance. @@ -1226,18 +1243,18 @@ def sample_delete_instance(): def apply_parameters( self, - request: Union[cloud_memcache.ApplyParametersRequest, dict] = None, + request: Optional[Union[cloud_memcache.ApplyParametersRequest, dict]] = None, *, - name: str = None, - node_ids: Sequence[str] = None, - apply_all: bool = None, + name: Optional[str] = None, + node_ids: Optional[MutableSequence[str]] = None, + apply_all: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""ApplyParameters will restart the set of specified - nodes in order to update them to the current set of - parameters for the Memcached Instance. + r"""``ApplyParameters`` restarts the set of specified nodes in order + to update them to the current set of parameters for the + Memcached Instance. .. 
code-block:: python @@ -1281,20 +1298,18 @@ def sample_apply_parameters(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - node_ids (Sequence[str]): - Nodes to which we should apply the - instance-level parameter group. + node_ids (MutableSequence[str]): + Nodes to which the instance-level + parameter group is applied. This corresponds to the ``node_ids`` field on the ``request`` instance; if ``request`` is provided, this should not be set. apply_all (bool): - Whether to apply instance-level - parameter group to all nodes. If set to - true, will explicitly restrict users - from specifying any nodes, and apply - parameter group updates to all nodes - within the instance. + Whether to apply instance-level parameter group to all + nodes. If set to true, users are restricted from + specifying individual nodes, and ``ApplyParameters`` + updates all nodes within the instance. This corresponds to the ``apply_all`` field on the ``request`` instance; if ``request`` is provided, this @@ -1310,7 +1325,8 @@ def sample_apply_parameters(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.memcache_v1.types.Instance` + :class:`google.cloud.memcache_v1.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. @@ -1367,6 +1383,151 @@ def sample_apply_parameters(): # Done; return the response. 
return response + def reschedule_maintenance( + self, + request: Optional[ + Union[cloud_memcache.RescheduleMaintenanceRequest, dict] + ] = None, + *, + instance: Optional[str] = None, + reschedule_type: Optional[ + cloud_memcache.RescheduleMaintenanceRequest.RescheduleType + ] = None, + schedule_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Reschedules upcoming maintenance event. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import memcache_v1 + + def sample_reschedule_maintenance(): + # Create a client + client = memcache_v1.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1.RescheduleMaintenanceRequest( + instance="instance_value", + reschedule_type="SPECIFIC_TIME", + ) + + # Make the request + operation = client.reschedule_maintenance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.memcache_v1.types.RescheduleMaintenanceRequest, dict]): + The request object. Request for + [RescheduleMaintenance][google.cloud.memcache.v1.CloudMemcache.RescheduleMaintenance]. + instance (str): + Required. Memcache instance resource name using the + form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. 
+ + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + reschedule_type (google.cloud.memcache_v1.types.RescheduleMaintenanceRequest.RescheduleType): + Required. If reschedule type is SPECIFIC_TIME, must set + up schedule_time as well. + + This corresponds to the ``reschedule_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp when the maintenance shall be rescheduled to + if reschedule_type=SPECIFIC_TIME, in RFC 3339 format, + for example ``2012-11-15T16:19:00.094Z``. + + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memcache_v1.types.Instance` A + Memorystore for Memcached instance + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, reschedule_type, schedule_time]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_memcache.RescheduleMaintenanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, cloud_memcache.RescheduleMaintenanceRequest): + request = cloud_memcache.RescheduleMaintenanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance is not None: + request.instance = instance + if reschedule_type is not None: + request.reschedule_type = reschedule_type + if schedule_time is not None: + request.schedule_time = schedule_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.reschedule_maintenance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("instance", request.instance),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. + return response + def __enter__(self): return self @@ -1380,6 +1541,331 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py index e02460b..154e04c 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py @@ -22,6 +22,7 @@ from google.api_core import retry as retries import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore import pkg_resources @@ -49,7 +50,7 @@ def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -163,6 +164,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=1200.0, client_info=client_info, ), + self.reschedule_maintenance: gapic_v1.method.wrap_method( + self.reschedule_maintenance, + default_timeout=1200.0, + client_info=client_info, + ), } def close(self): @@ -245,6 +251,69 @@ def apply_parameters( ]: raise NotImplementedError() + @property + def reschedule_maintenance( + self, + ) -> Callable[ + [cloud_memcache.RescheduleMaintenanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + 
[operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py index a135b9a..c190673 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py @@ -20,6 +20,7 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore @@ -65,14 +66,14 @@ def __init__( self, *, host: str = "memcache.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - 
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -200,8 +201,8 @@ def __init__( def create_channel( cls, host: str = "memcache.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, @@ -375,10 +376,10 @@ def update_parameters( ) -> Callable[[cloud_memcache.UpdateParametersRequest], operations_pb2.Operation]: r"""Return a callable for the update parameters method over gRPC. - Updates the defined Memcached Parameters for an - existing Instance. This method only stages the - parameters, it must be followed by ApplyParameters to - apply the parameters to nodes of the Memcached Instance. + Updates the defined Memcached parameters for an existing + instance. This method only stages the parameters, it must be + followed by ``ApplyParameters`` to apply the parameters to nodes + of the Memcached instance. Returns: Callable[[~.UpdateParametersRequest], @@ -430,9 +431,9 @@ def apply_parameters( ) -> Callable[[cloud_memcache.ApplyParametersRequest], operations_pb2.Operation]: r"""Return a callable for the apply parameters method over gRPC. 
- ApplyParameters will restart the set of specified - nodes in order to update them to the current set of - parameters for the Memcached Instance. + ``ApplyParameters`` restarts the set of specified nodes in order + to update them to the current set of parameters for the + Memcached Instance. Returns: Callable[[~.ApplyParametersRequest], @@ -452,9 +453,143 @@ def apply_parameters( ) return self._stubs["apply_parameters"] + @property + def reschedule_maintenance( + self, + ) -> Callable[ + [cloud_memcache.RescheduleMaintenanceRequest], operations_pb2.Operation + ]: + r"""Return a callable for the reschedule maintenance method over gRPC. + + Reschedules upcoming maintenance event. + + Returns: + Callable[[~.RescheduleMaintenanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "reschedule_maintenance" not in self._stubs: + self._stubs["reschedule_maintenance"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/RescheduleMaintenance", + request_serializer=cloud_memcache.RescheduleMaintenanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["reschedule_maintenance"] + def close(self): self.grpc_channel.close() + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
     @property
     def kind(self) -> str:
         return "grpc"
diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py
index 3430e39..cfc1ad6 100644
--- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py
+++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py
@@ -19,6 +19,7 @@
 from google.api_core import gapic_v1, grpc_helpers_async, operations_v1
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.cloud.location import locations_pb2  # type: ignore
 from google.longrunning import operations_pb2  # type: ignore
 import grpc  # type: ignore
 from grpc.experimental import aio  # type: ignore
@@ -67,7 +68,7 @@ class CloudMemcacheGrpcAsyncIOTransport(CloudMemcacheTransport):
     def create_channel(
         cls,
host: str = "memcache.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -110,15 +111,15 @@ def __init__( self, *, host: str = "memcache.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, @@ -389,10 +390,10 @@ def update_parameters( ]: r"""Return a callable for the update parameters method over gRPC. - Updates the defined Memcached Parameters for an - existing Instance. This method only stages the - parameters, it must be followed by ApplyParameters to - apply the parameters to nodes of the Memcached Instance. + Updates the defined Memcached parameters for an existing + instance. This method only stages the parameters, it must be + followed by ``ApplyParameters`` to apply the parameters to nodes + of the Memcached instance. 
Returns: Callable[[~.UpdateParametersRequest], @@ -448,9 +449,9 @@ def apply_parameters( ]: r"""Return a callable for the apply parameters method over gRPC. - ApplyParameters will restart the set of specified - nodes in order to update them to the current set of - parameters for the Memcached Instance. + ``ApplyParameters`` restarts the set of specified nodes in order + to update them to the current set of parameters for the + Memcached Instance. Returns: Callable[[~.ApplyParametersRequest], @@ -470,8 +471,143 @@ def apply_parameters( ) return self._stubs["apply_parameters"] + @property + def reschedule_maintenance( + self, + ) -> Callable[ + [cloud_memcache.RescheduleMaintenanceRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the reschedule maintenance method over gRPC. + + Reschedules upcoming maintenance event. + + Returns: + Callable[[~.RescheduleMaintenanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "reschedule_maintenance" not in self._stubs: + self._stubs["reschedule_maintenance"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/RescheduleMaintenance", + request_serializer=cloud_memcache.RescheduleMaintenanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["reschedule_maintenance"] + def close(self): return self.grpc_channel.close() + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + __all__ = ("CloudMemcacheGrpcAsyncIOTransport",) diff --git a/google/cloud/memcache_v1/types/__init__.py b/google/cloud/memcache_v1/types/__init__.py index adc89ef..3887af7 100644 --- a/google/cloud/memcache_v1/types/__init__.py +++ b/google/cloud/memcache_v1/types/__init__.py @@ -21,11 +21,17 @@ Instance, ListInstancesRequest, ListInstancesResponse, + LocationMetadata, + MaintenancePolicy, + MaintenanceSchedule, MemcacheParameters, MemcacheVersion, OperationMetadata, + RescheduleMaintenanceRequest, UpdateInstanceRequest, UpdateParametersRequest, + WeeklyMaintenanceWindow, + ZoneMetadata, ) __all__ = ( @@ -36,9 +42,15 @@ "Instance", "ListInstancesRequest", "ListInstancesResponse", + "LocationMetadata", + "MaintenancePolicy", + "MaintenanceSchedule", "MemcacheParameters", "OperationMetadata", + "RescheduleMaintenanceRequest", "UpdateInstanceRequest", "UpdateParametersRequest", + "WeeklyMaintenanceWindow", + "ZoneMetadata", 
"MemcacheVersion", ) diff --git a/google/cloud/memcache_v1/types/cloud_memcache.py b/google/cloud/memcache_v1/types/cloud_memcache.py index 1077426..bc1947e 100644 --- a/google/cloud/memcache_v1/types/cloud_memcache.py +++ b/google/cloud/memcache_v1/types/cloud_memcache.py @@ -13,8 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( @@ -22,6 +27,10 @@ manifest={ "MemcacheVersion", "Instance", + "MaintenancePolicy", + "WeeklyMaintenanceWindow", + "MaintenanceSchedule", + "RescheduleMaintenanceRequest", "ListInstancesRequest", "ListInstancesResponse", "GetInstanceRequest", @@ -32,6 +41,8 @@ "UpdateParametersRequest", "MemcacheParameters", "OperationMetadata", + "LocationMetadata", + "ZoneMetadata", }, ) @@ -43,7 +54,7 @@ class MemcacheVersion(proto.Enum): class Instance(proto.Message): - r""" + r"""A Memorystore for Memcached instance Attributes: name (str): @@ -51,16 +62,17 @@ class Instance(proto.Message): including project and location using the form: ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - Note: Memcached instances are managed and addressed at - regional level so location_id here refers to a GCP region; - however, users may choose which zones Memcached nodes within - an instances should be provisioned in. Refer to [zones] + Note: Memcached instances are managed and addressed at the + regional level so ``location_id`` here refers to a Google + Cloud region; however, users may choose which zones + Memcached nodes should be provisioned in within an instance. 
+ Refer to [zones][google.cloud.memcache.v1.Instance.zones] field for more details. display_name (str): - User provided name for the instance only used - for display purposes. Cannot be more than 80 - characters. - labels (Mapping[str, str]): + User provided name for the instance, which is + only used for display purposes. Cannot be more + than 80 characters. + labels (MutableMapping[str, str]): Resource labels to represent user-provided metadata. Refer to cloud documentation on labels for more details. @@ -70,9 +82,9 @@ class Instance(proto.Message): `network `__ to which the instance is connected. If left unspecified, the ``default`` network will be used. - zones (Sequence[str]): - Zones where Memcached nodes should be - provisioned in. Memcached nodes will be equally + zones (MutableSequence[str]): + Zones in which Memcached nodes should be + provisioned. Memcached nodes will be equally distributed across these zones. If not provided, the service will by default create nodes in all zones in the region for the instance. @@ -84,15 +96,16 @@ class Instance(proto.Message): memcache_version (google.cloud.memcache_v1.types.MemcacheVersion): The major version of Memcached software. If not provided, latest supported version will be used. Currently the latest - supported major version is MEMCACHE_1_5. The minor version - will be automatically determined by our system based on the - latest supported minor version. + supported major version is ``MEMCACHE_1_5``. The minor + version will be automatically determined by our system based + on the latest supported minor version. parameters (google.cloud.memcache_v1.types.MemcacheParameters): - Optional: User defined parameters to apply to - the memcached process on each node. - memcache_nodes (Sequence[google.cloud.memcache_v1.types.Instance.Node]): - Output only. List of Memcached nodes. Refer to [Node] - message for more details. + User defined parameters to apply to the + memcached process on each node. 
+ memcache_nodes (MutableSequence[google.cloud.memcache_v1.types.Instance.Node]): + Output only. List of Memcached nodes. Refer to + [Node][google.cloud.memcache.v1.Instance.Node] message for + more details. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time the instance was created. @@ -110,11 +123,18 @@ class Instance(proto.Message): MemcacheVersion. The full version format will be "memcached-1.5.16". - instance_messages (Sequence[google.cloud.memcache_v1.types.Instance.InstanceMessage]): - List of messages that describe current - statuses of memcached instance. + instance_messages (MutableSequence[google.cloud.memcache_v1.types.Instance.InstanceMessage]): + List of messages that describe the current + state of the Memcached instance. discovery_endpoint (str): - Output only. Endpoint for Discovery API + Output only. Endpoint for the Discovery API. + maintenance_policy (google.cloud.memcache_v1.types.MaintenancePolicy): + The maintenance policy for the instance. If + not provided, the maintenance event will be + performed based on Memorystore internal rollout + schedule. + maintenance_schedule (google.cloud.memcache_v1.types.MaintenanceSchedule): + Output only. Published maintenance schedule. """ class State(proto.Enum): @@ -122,6 +142,7 @@ class State(proto.Enum): STATE_UNSPECIFIED = 0 CREATING = 1 READY = 2 + UPDATING = 3 DELETING = 4 PERFORMING_MAINTENANCE = 5 @@ -136,11 +157,11 @@ class NodeConfig(proto.Message): Memcached node. 
""" - cpu_count = proto.Field( + cpu_count: int = proto.Field( proto.INT32, number=1, ) - memory_size_mb = proto.Field( + memory_size_mb: int = proto.Field( proto.INT32, number=2, ) @@ -179,28 +200,28 @@ class State(proto.Enum): DELETING = 3 UPDATING = 4 - node_id = proto.Field( + node_id: str = proto.Field( proto.STRING, number=1, ) - zone = proto.Field( + zone: str = proto.Field( proto.STRING, number=2, ) - state = proto.Field( + state: "Instance.Node.State" = proto.Field( proto.ENUM, number=3, enum="Instance.Node.State", ) - host = proto.Field( + host: str = proto.Field( proto.STRING, number=4, ) - port = proto.Field( + port: int = proto.Field( proto.INT32, number=5, ) - parameters = proto.Field( + parameters: "MemcacheParameters" = proto.Field( proto.MESSAGE, number=6, message="MemcacheParameters", @@ -223,89 +244,247 @@ class Code(proto.Enum): CODE_UNSPECIFIED = 0 ZONE_DISTRIBUTION_UNBALANCED = 1 - code = proto.Field( + code: "Instance.InstanceMessage.Code" = proto.Field( proto.ENUM, number=1, enum="Instance.InstanceMessage.Code", ) - message = proto.Field( + message: str = proto.Field( proto.STRING, number=2, ) - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - display_name = proto.Field( + display_name: str = proto.Field( proto.STRING, number=2, ) - labels = proto.MapField( + labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=3, ) - authorized_network = proto.Field( + authorized_network: str = proto.Field( proto.STRING, number=4, ) - zones = proto.RepeatedField( + zones: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=5, ) - node_count = proto.Field( + node_count: int = proto.Field( proto.INT32, number=6, ) - node_config = proto.Field( + node_config: NodeConfig = proto.Field( proto.MESSAGE, number=7, message=NodeConfig, ) - memcache_version = proto.Field( + memcache_version: "MemcacheVersion" = proto.Field( proto.ENUM, number=9, enum="MemcacheVersion", ) - parameters = proto.Field( 
+ parameters: "MemcacheParameters" = proto.Field( proto.MESSAGE, number=11, message="MemcacheParameters", ) - memcache_nodes = proto.RepeatedField( + memcache_nodes: MutableSequence[Node] = proto.RepeatedField( proto.MESSAGE, number=12, message=Node, ) - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=13, message=timestamp_pb2.Timestamp, ) - update_time = proto.Field( + update_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=14, message=timestamp_pb2.Timestamp, ) - state = proto.Field( + state: State = proto.Field( proto.ENUM, number=15, enum=State, ) - memcache_full_version = proto.Field( + memcache_full_version: str = proto.Field( proto.STRING, number=18, ) - instance_messages = proto.RepeatedField( + instance_messages: MutableSequence[InstanceMessage] = proto.RepeatedField( proto.MESSAGE, number=19, message=InstanceMessage, ) - discovery_endpoint = proto.Field( + discovery_endpoint: str = proto.Field( proto.STRING, number=20, ) + maintenance_policy: "MaintenancePolicy" = proto.Field( + proto.MESSAGE, + number=21, + message="MaintenancePolicy", + ) + maintenance_schedule: "MaintenanceSchedule" = proto.Field( + proto.MESSAGE, + number=22, + message="MaintenanceSchedule", + ) + + +class MaintenancePolicy(proto.Message): + r"""Maintenance policy per instance. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the policy was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the policy was + updated. + description (str): + Description of what this policy is for. Create/Update + methods return INVALID_ARGUMENT if the length is greater + than 512. + weekly_maintenance_window (MutableSequence[google.cloud.memcache_v1.types.WeeklyMaintenanceWindow]): + Required. Maintenance window that is applied to resources + covered by this policy. Minimum 1. 
For the current version, + the maximum number of weekly_maintenance_windows is expected + to be one. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + weekly_maintenance_window: MutableSequence[ + "WeeklyMaintenanceWindow" + ] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="WeeklyMaintenanceWindow", + ) + + +class WeeklyMaintenanceWindow(proto.Message): + r"""Time window specified for weekly operations. + + Attributes: + day (google.type.dayofweek_pb2.DayOfWeek): + Required. Allows to define schedule that runs + specified day of the week. + start_time (google.type.timeofday_pb2.TimeOfDay): + Required. Start time of the window in UTC. + duration (google.protobuf.duration_pb2.Duration): + Required. Duration of the time window. + """ + + day: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=1, + enum=dayofweek_pb2.DayOfWeek, + ) + start_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=2, + message=timeofday_pb2.TimeOfDay, + ) + duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + + +class MaintenanceSchedule(proto.Message): + r"""Upcoming maintenance schedule. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The start time of any upcoming + scheduled maintenance for this instance. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The end time of any upcoming + scheduled maintenance for this instance. + schedule_deadline_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The deadline that the + maintenance schedule start time can not go + beyond, including reschedule. 
+ """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + schedule_deadline_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class RescheduleMaintenanceRequest(proto.Message): + r"""Request for + [RescheduleMaintenance][google.cloud.memcache.v1.CloudMemcache.RescheduleMaintenance]. + + Attributes: + instance (str): + Required. Memcache instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + reschedule_type (google.cloud.memcache_v1.types.RescheduleMaintenanceRequest.RescheduleType): + Required. If reschedule type is SPECIFIC_TIME, must set up + schedule_time as well. + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp when the maintenance shall be rescheduled to if + reschedule_type=SPECIFIC_TIME, in RFC 3339 format, for + example ``2012-11-15T16:19:00.094Z``. + """ + + class RescheduleType(proto.Enum): + r"""Reschedule options.""" + RESCHEDULE_TYPE_UNSPECIFIED = 0 + IMMEDIATE = 1 + NEXT_AVAILABLE_WINDOW = 2 + SPECIFIC_TIME = 3 + + instance: str = proto.Field( + proto.STRING, + number=1, + ) + reschedule_type: RescheduleType = proto.Field( + proto.ENUM, + number=2, + enum=RescheduleType, + ) + schedule_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) class ListInstancesRequest(proto.Message): @@ -321,40 +500,40 @@ class ListInstancesRequest(proto.Message): The maximum number of items to return. If not specified, a default value of 1000 will be used by - the service. 
Regardless of the page_size value, the response - may include a partial list and a caller should only rely on - response's - [next_page_token][CloudMemcache.ListInstancesResponse.next_page_token] + the service. Regardless of the ``page_size`` value, the + response may include a partial list and a caller should only + rely on response's + [``next_page_token``][google.cloud.memcache.v1.ListInstancesResponse.next_page_token] to determine if there are more instances left to be queried. page_token (str): - The next_page_token value returned from a previous List + The ``next_page_token`` value returned from a previous List request, if any. filter (str): - List filter. For example, exclude all - Memcached instances with name as my-instance by - specifying "name != my-instance". + List filter. For example, exclude all Memcached instances + with name as my-instance by specifying + ``"name != my-instance"``. order_by (str): Sort results. Supported values are "name", "name desc" or "" (unsorted). """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=2, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=3, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=4, ) - order_by = proto.Field( + order_by: str = proto.Field( proto.STRING, number=5, ) @@ -365,7 +544,7 @@ class ListInstancesResponse(proto.Message): [ListInstances][google.cloud.memcache.v1.CloudMemcache.ListInstances]. Attributes: - instances (Sequence[google.cloud.memcache_v1.types.Instance]): + instances (MutableSequence[google.cloud.memcache_v1.types.Instance]): A list of Memcached instances in the project in the specified location, or across all locations. @@ -376,7 +555,7 @@ class ListInstancesResponse(proto.Message): Token to retrieve the next page of results, or empty if there are no more results in the list. 
- unreachable (Sequence[str]): + unreachable (MutableSequence[str]): Locations that could not be reached. """ @@ -384,16 +563,16 @@ class ListInstancesResponse(proto.Message): def raw_page(self): return self - instances = proto.RepeatedField( + instances: MutableSequence["Instance"] = proto.RepeatedField( proto.MESSAGE, number=1, message="Instance", ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) - unreachable = proto.RepeatedField( + unreachable: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=3, ) @@ -410,7 +589,7 @@ class GetInstanceRequest(proto.Message): where ``location_id`` refers to a GCP region """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -434,23 +613,23 @@ class CreateInstanceRequest(proto.Message): - Must start with a letter. - Must be between 1-40 characters. - Must end with a number or a letter. - - Must be unique within the user project / location + - Must be unique within the user project / location. - If any of the above are not met, will raise an invalid + If any of the above are not met, the API raises an invalid argument error. instance (google.cloud.memcache_v1.types.Instance): Required. A Memcached Instance """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - instance_id = proto.Field( + instance_id: str = proto.Field( proto.STRING, number=2, ) - instance = proto.Field( + instance: "Instance" = proto.Field( proto.MESSAGE, number=3, message="Instance", @@ -471,12 +650,12 @@ class UpdateInstanceRequest(proto.Message): update_mask are updated. 
""" - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=1, message=field_mask_pb2.FieldMask, ) - instance = proto.Field( + instance: "Instance" = proto.Field( proto.MESSAGE, number=2, message="Instance", @@ -494,7 +673,7 @@ class DeleteInstanceRequest(proto.Message): where ``location_id`` refers to a GCP region """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -509,26 +688,25 @@ class ApplyParametersRequest(proto.Message): Required. Resource name of the Memcached instance for which parameter group updates should be applied. - node_ids (Sequence[str]): - Nodes to which we should apply the - instance-level parameter group. + node_ids (MutableSequence[str]): + Nodes to which the instance-level parameter + group is applied. apply_all (bool): - Whether to apply instance-level parameter - group to all nodes. If set to true, will - explicitly restrict users from specifying any - nodes, and apply parameter group updates to all - nodes within the instance. + Whether to apply instance-level parameter group to all + nodes. If set to true, users are restricted from specifying + individual nodes, and ``ApplyParameters`` updates all nodes + within the instance. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - node_ids = proto.RepeatedField( + node_ids: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=2, ) - apply_all = proto.Field( + apply_all: bool = proto.Field( proto.BOOL, number=3, ) @@ -549,16 +727,16 @@ class UpdateParametersRequest(proto.Message): The parameters to apply to the instance. 
""" - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, ) - parameters = proto.Field( + parameters: "MemcacheParameters" = proto.Field( proto.MESSAGE, number=3, message="MemcacheParameters", @@ -574,18 +752,19 @@ class MemcacheParameters(proto.Message): this set of parameters. Users can use this id to determine if the parameters associated with the instance differ from the parameters associated - with the nodes and any action needs to be taken - to apply parameters on nodes. - params (Mapping[str, str]): + with the nodes. A discrepancy between parameter + ids can inform users that they may need to take + action to apply parameters on nodes. + params (MutableMapping[str, str]): User defined set of parameters to use in the memcached process. """ - id = proto.Field( + id: str = proto.Field( proto.STRING, number=1, ) - params = proto.MapField( + params: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=3, @@ -622,36 +801,60 @@ class OperationMetadata(proto.Message): operation. 
""" - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp, ) - end_time = proto.Field( + end_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp, ) - target = proto.Field( + target: str = proto.Field( proto.STRING, number=3, ) - verb = proto.Field( + verb: str = proto.Field( proto.STRING, number=4, ) - status_detail = proto.Field( + status_detail: str = proto.Field( proto.STRING, number=5, ) - cancel_requested = proto.Field( + cancel_requested: bool = proto.Field( proto.BOOL, number=6, ) - api_version = proto.Field( + api_version: str = proto.Field( proto.STRING, number=7, ) +class LocationMetadata(proto.Message): + r"""Metadata for the given + [google.cloud.location.Location][google.cloud.location.Location]. + + Attributes: + available_zones (MutableMapping[str, google.cloud.memcache_v1.types.ZoneMetadata]): + Output only. The set of available zones in the location. The + map is keyed by the lowercase ID of each zone, as defined by + GCE. These keys can be specified in the ``zones`` field when + creating a Memcached instance. + """ + + available_zones: MutableMapping[str, "ZoneMetadata"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=1, + message="ZoneMetadata", + ) + + +class ZoneMetadata(proto.Message): + r""" """ + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/memcache_v1beta2/__init__.py b/google/cloud/memcache_v1beta2/__init__.py index 0efe11e..a655516 100644 --- a/google/cloud/memcache_v1beta2/__init__.py +++ b/google/cloud/memcache_v1beta2/__init__.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from google.cloud.memcache import gapic_version as package_version + +__version__ = package_version.__version__ + from .services.cloud_memcache import CloudMemcacheAsyncClient, CloudMemcacheClient from .types.cloud_memcache import ( @@ -25,11 +29,15 @@ ListInstancesRequest, ListInstancesResponse, LocationMetadata, + MaintenancePolicy, + MaintenanceSchedule, MemcacheParameters, MemcacheVersion, OperationMetadata, + RescheduleMaintenanceRequest, UpdateInstanceRequest, UpdateParametersRequest, + WeeklyMaintenanceWindow, ZoneMetadata, ) @@ -45,10 +53,14 @@ "ListInstancesRequest", "ListInstancesResponse", "LocationMetadata", + "MaintenancePolicy", + "MaintenanceSchedule", "MemcacheParameters", "MemcacheVersion", "OperationMetadata", + "RescheduleMaintenanceRequest", "UpdateInstanceRequest", "UpdateParametersRequest", + "WeeklyMaintenanceWindow", "ZoneMetadata", ) diff --git a/google/cloud/memcache_v1beta2/gapic_metadata.json b/google/cloud/memcache_v1beta2/gapic_metadata.json index 288ef89..4143466 100644 --- a/google/cloud/memcache_v1beta2/gapic_metadata.json +++ b/google/cloud/memcache_v1beta2/gapic_metadata.json @@ -40,6 +40,11 @@ "list_instances" ] }, + "RescheduleMaintenance": { + "methods": [ + "reschedule_maintenance" + ] + }, "UpdateInstance": { "methods": [ "update_instance" @@ -85,6 +90,11 @@ "list_instances" ] }, + "RescheduleMaintenance": { + "methods": [ + "reschedule_maintenance" + ] + }, "UpdateInstance": { "methods": [ "update_instance" diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py index 0c5c5cb..0f31f71 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py @@ -16,7 +16,17 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + 
Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 @@ -33,6 +43,8 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -184,9 +196,9 @@ def transport(self) -> CloudMemcacheTransport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, CloudMemcacheTransport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the cloud memcache client. @@ -230,11 +242,11 @@ def __init__( async def list_instances( self, - request: Union[cloud_memcache.ListInstancesRequest, dict] = None, + request: Optional[Union[cloud_memcache.ListInstancesRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesAsyncPager: r"""Lists Instances in a given location. @@ -267,7 +279,7 @@ async def sample_list_instances(): print(response) Args: - request (Union[google.cloud.memcache_v1beta2.types.ListInstancesRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1beta2.types.ListInstancesRequest, dict]]): The request object. Request for [ListInstances][google.cloud.memcache.v1beta2.CloudMemcache.ListInstances]. 
parent (:class:`str`): @@ -347,11 +359,11 @@ async def sample_list_instances(): async def get_instance( self, - request: Union[cloud_memcache.GetInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.GetInstanceRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> cloud_memcache.Instance: r"""Gets details of a single Instance. @@ -383,7 +395,7 @@ async def sample_get_instance(): print(response) Args: - request (Union[google.cloud.memcache_v1beta2.types.GetInstanceRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1beta2.types.GetInstanceRequest, dict]]): The request object. Request for [GetInstance][google.cloud.memcache.v1beta2.CloudMemcache.GetInstance]. name (:class:`str`): @@ -449,13 +461,13 @@ async def sample_get_instance(): async def create_instance( self, - request: Union[cloud_memcache.CreateInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.CreateInstanceRequest, dict]] = None, *, - parent: str = None, - instance_id: str = None, - resource: cloud_memcache.Instance = None, + parent: Optional[str] = None, + instance_id: Optional[str] = None, + resource: Optional[cloud_memcache.Instance] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Creates a new Instance in a given location. @@ -499,7 +511,7 @@ async def sample_create_instance(): print(response) Args: - request (Union[google.cloud.memcache_v1beta2.types.CreateInstanceRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1beta2.types.CreateInstanceRequest, dict]]): The request object. Request for [CreateInstance][google.cloud.memcache.v1beta2.CloudMemcache.CreateInstance]. 
parent (:class:`str`): @@ -604,12 +616,12 @@ async def sample_create_instance(): async def update_instance( self, - request: Union[cloud_memcache.UpdateInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.UpdateInstanceRequest, dict]] = None, *, - update_mask: field_mask_pb2.FieldMask = None, - resource: cloud_memcache.Instance = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + resource: Optional[cloud_memcache.Instance] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Updates an existing Instance in a given project and @@ -652,7 +664,7 @@ async def sample_update_instance(): print(response) Args: - request (Union[google.cloud.memcache_v1beta2.types.UpdateInstanceRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1beta2.types.UpdateInstanceRequest, dict]]): The request object. Request for [UpdateInstance][google.cloud.memcache.v1beta2.CloudMemcache.UpdateInstance]. 
update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): @@ -741,13 +753,13 @@ async def sample_update_instance(): async def update_parameters( self, - request: Union[cloud_memcache.UpdateParametersRequest, dict] = None, + request: Optional[Union[cloud_memcache.UpdateParametersRequest, dict]] = None, *, - name: str = None, - update_mask: field_mask_pb2.FieldMask = None, - parameters: cloud_memcache.MemcacheParameters = None, + name: Optional[str] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + parameters: Optional[cloud_memcache.MemcacheParameters] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Updates the defined Memcached parameters for an existing @@ -786,7 +798,7 @@ async def sample_update_parameters(): print(response) Args: - request (Union[google.cloud.memcache_v1beta2.types.UpdateParametersRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1beta2.types.UpdateParametersRequest, dict]]): The request object. Request for [UpdateParameters][google.cloud.memcache.v1beta2.CloudMemcache.UpdateParameters]. name (:class:`str`): @@ -880,11 +892,11 @@ async def sample_update_parameters(): async def delete_instance( self, - request: Union[cloud_memcache.DeleteInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.DeleteInstanceRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Deletes a single Instance. 
@@ -920,7 +932,7 @@ async def sample_delete_instance(): print(response) Args: - request (Union[google.cloud.memcache_v1beta2.types.DeleteInstanceRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1beta2.types.DeleteInstanceRequest, dict]]): The request object. Request for [DeleteInstance][google.cloud.memcache.v1beta2.CloudMemcache.DeleteInstance]. name (:class:`str`): @@ -1006,13 +1018,13 @@ async def sample_delete_instance(): async def apply_parameters( self, - request: Union[cloud_memcache.ApplyParametersRequest, dict] = None, + request: Optional[Union[cloud_memcache.ApplyParametersRequest, dict]] = None, *, - name: str = None, - node_ids: Sequence[str] = None, - apply_all: bool = None, + name: Optional[str] = None, + node_ids: Optional[MutableSequence[str]] = None, + apply_all: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""``ApplyParameters`` restarts the set of specified nodes in order @@ -1050,7 +1062,7 @@ async def sample_apply_parameters(): print(response) Args: - request (Union[google.cloud.memcache_v1beta2.types.ApplyParametersRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1beta2.types.ApplyParametersRequest, dict]]): The request object. Request for [ApplyParameters][google.cloud.memcache.v1beta2.CloudMemcache.ApplyParameters]. name (:class:`str`): @@ -1061,7 +1073,7 @@ async def sample_apply_parameters(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - node_ids (:class:`Sequence[str]`): + node_ids (:class:`MutableSequence[str]`): Nodes to which the instance-level parameter group is applied. 
@@ -1148,13 +1160,15 @@ async def sample_apply_parameters(): async def apply_software_update( self, - request: Union[cloud_memcache.ApplySoftwareUpdateRequest, dict] = None, + request: Optional[ + Union[cloud_memcache.ApplySoftwareUpdateRequest, dict] + ] = None, *, - instance: str = None, - node_ids: Sequence[str] = None, - apply_all: bool = None, + instance: Optional[str] = None, + node_ids: Optional[MutableSequence[str]] = None, + apply_all: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Updates software on the selected nodes of the @@ -1191,7 +1205,7 @@ async def sample_apply_software_update(): print(response) Args: - request (Union[google.cloud.memcache_v1beta2.types.ApplySoftwareUpdateRequest, dict]): + request (Optional[Union[google.cloud.memcache_v1beta2.types.ApplySoftwareUpdateRequest, dict]]): The request object. Request for [ApplySoftwareUpdate][google.cloud.memcache.v1beta2.CloudMemcache.ApplySoftwareUpdate]. instance (:class:`str`): @@ -1202,7 +1216,7 @@ async def sample_apply_software_update(): This corresponds to the ``instance`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - node_ids (:class:`Sequence[str]`): + node_ids (:class:`MutableSequence[str]`): Nodes to which we should apply the update to. Note all the selected nodes are updated in parallel. @@ -1290,6 +1304,477 @@ async def sample_apply_software_update(): # Done; return the response. 
return response + async def reschedule_maintenance( + self, + request: Optional[ + Union[cloud_memcache.RescheduleMaintenanceRequest, dict] + ] = None, + *, + instance: Optional[str] = None, + reschedule_type: Optional[ + cloud_memcache.RescheduleMaintenanceRequest.RescheduleType + ] = None, + schedule_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Performs the apply phase of the RescheduleMaintenance + verb. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import memcache_v1beta2 + + async def sample_reschedule_maintenance(): + # Create a client + client = memcache_v1beta2.CloudMemcacheAsyncClient() + + # Initialize request argument(s) + request = memcache_v1beta2.RescheduleMaintenanceRequest( + instance="instance_value", + reschedule_type="SPECIFIC_TIME", + ) + + # Make the request + operation = client.reschedule_maintenance(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.memcache_v1beta2.types.RescheduleMaintenanceRequest, dict]]): + The request object. Request for + [RescheduleMaintenance][google.cloud.memcache.v1beta2.CloudMemcache.RescheduleMaintenance]. + instance (:class:`str`): + Required. 
Memcache instance resource name using the + form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + reschedule_type (:class:`google.cloud.memcache_v1beta2.types.RescheduleMaintenanceRequest.RescheduleType`): + Required. If reschedule type is SPECIFIC_TIME, must set + up schedule_time as well. + + This corresponds to the ``reschedule_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (:class:`google.protobuf.timestamp_pb2.Timestamp`): + Timestamp when the maintenance shall be rescheduled to + if reschedule_type=SPECIFIC_TIME, in RFC 3339 format, + for example ``2012-11-15T16:19:00.094Z``. + + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memcache_v1beta2.types.Instance` A + Memorystore for Memcached instance + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, reschedule_type, schedule_time]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = cloud_memcache.RescheduleMaintenanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance is not None: + request.instance = instance + if reschedule_type is not None: + request.reschedule_type = reschedule_type + if schedule_time is not None: + request.schedule_time = schedule_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.reschedule_maintenance, + default_timeout=1200.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("instance", request.instance),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def __aenter__(self): return self diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index 83d9691..936ea9a 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -16,7 +16,18 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -36,6 +47,8 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -62,7 +75,7 @@ class CloudMemcacheClientMeta(type): def get_transport_class( cls, - label: str = None, + label: Optional[str] = None, ) -> Type[CloudMemcacheTransport]: """Returns an appropriate transport class. @@ -356,8 +369,8 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, CloudMemcacheTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + transport: Optional[Union[str, CloudMemcacheTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the cloud memcache client. 
@@ -371,7 +384,7 @@ def __init__( transport (Union[str, CloudMemcacheTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -401,6 +414,7 @@ def __init__( client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( client_options @@ -453,11 +467,11 @@ def __init__( def list_instances( self, - request: Union[cloud_memcache.ListInstancesRequest, dict] = None, + request: Optional[Union[cloud_memcache.ListInstancesRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesPager: r"""Lists Instances in a given location. @@ -570,11 +584,11 @@ def sample_list_instances(): def get_instance( self, - request: Union[cloud_memcache.GetInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.GetInstanceRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> cloud_memcache.Instance: r"""Gets details of a single Instance. 
@@ -672,13 +686,13 @@ def sample_get_instance(): def create_instance( self, - request: Union[cloud_memcache.CreateInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.CreateInstanceRequest, dict]] = None, *, - parent: str = None, - instance_id: str = None, - resource: cloud_memcache.Instance = None, + parent: Optional[str] = None, + instance_id: Optional[str] = None, + resource: Optional[cloud_memcache.Instance] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Creates a new Instance in a given location. @@ -827,12 +841,12 @@ def sample_create_instance(): def update_instance( self, - request: Union[cloud_memcache.UpdateInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.UpdateInstanceRequest, dict]] = None, *, - update_mask: field_mask_pb2.FieldMask = None, - resource: cloud_memcache.Instance = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + resource: Optional[cloud_memcache.Instance] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Updates an existing Instance in a given project and @@ -964,13 +978,13 @@ def sample_update_instance(): def update_parameters( self, - request: Union[cloud_memcache.UpdateParametersRequest, dict] = None, + request: Optional[Union[cloud_memcache.UpdateParametersRequest, dict]] = None, *, - name: str = None, - update_mask: field_mask_pb2.FieldMask = None, - parameters: cloud_memcache.MemcacheParameters = None, + name: Optional[str] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + parameters: Optional[cloud_memcache.MemcacheParameters] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) 
-> operation.Operation: r"""Updates the defined Memcached parameters for an existing @@ -1103,11 +1117,11 @@ def sample_update_parameters(): def delete_instance( self, - request: Union[cloud_memcache.DeleteInstanceRequest, dict] = None, + request: Optional[Union[cloud_memcache.DeleteInstanceRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Deletes a single Instance. @@ -1229,13 +1243,13 @@ def sample_delete_instance(): def apply_parameters( self, - request: Union[cloud_memcache.ApplyParametersRequest, dict] = None, + request: Optional[Union[cloud_memcache.ApplyParametersRequest, dict]] = None, *, - name: str = None, - node_ids: Sequence[str] = None, - apply_all: bool = None, + name: Optional[str] = None, + node_ids: Optional[MutableSequence[str]] = None, + apply_all: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""``ApplyParameters`` restarts the set of specified nodes in order @@ -1284,7 +1298,7 @@ def sample_apply_parameters(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - node_ids (Sequence[str]): + node_ids (MutableSequence[str]): Nodes to which the instance-level parameter group is applied. 
@@ -1371,13 +1385,15 @@ def sample_apply_parameters(): def apply_software_update( self, - request: Union[cloud_memcache.ApplySoftwareUpdateRequest, dict] = None, + request: Optional[ + Union[cloud_memcache.ApplySoftwareUpdateRequest, dict] + ] = None, *, - instance: str = None, - node_ids: Sequence[str] = None, - apply_all: bool = None, + instance: Optional[str] = None, + node_ids: Optional[MutableSequence[str]] = None, + apply_all: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Updates software on the selected nodes of the @@ -1425,7 +1441,7 @@ def sample_apply_software_update(): This corresponds to the ``instance`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - node_ids (Sequence[str]): + node_ids (MutableSequence[str]): Nodes to which we should apply the update to. Note all the selected nodes are updated in parallel. @@ -1513,6 +1529,152 @@ def sample_apply_software_update(): # Done; return the response. return response + def reschedule_maintenance( + self, + request: Optional[ + Union[cloud_memcache.RescheduleMaintenanceRequest, dict] + ] = None, + *, + instance: Optional[str] = None, + reschedule_type: Optional[ + cloud_memcache.RescheduleMaintenanceRequest.RescheduleType + ] = None, + schedule_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Performs the apply phase of the RescheduleMaintenance + verb. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import memcache_v1beta2 + + def sample_reschedule_maintenance(): + # Create a client + client = memcache_v1beta2.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1beta2.RescheduleMaintenanceRequest( + instance="instance_value", + reschedule_type="SPECIFIC_TIME", + ) + + # Make the request + operation = client.reschedule_maintenance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.memcache_v1beta2.types.RescheduleMaintenanceRequest, dict]): + The request object. Request for + [RescheduleMaintenance][google.cloud.memcache.v1beta2.CloudMemcache.RescheduleMaintenance]. + instance (str): + Required. Memcache instance resource name using the + form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + reschedule_type (google.cloud.memcache_v1beta2.types.RescheduleMaintenanceRequest.RescheduleType): + Required. If reschedule type is SPECIFIC_TIME, must set + up schedule_time as well. + + This corresponds to the ``reschedule_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp when the maintenance shall be rescheduled to + if reschedule_type=SPECIFIC_TIME, in RFC 3339 format, + for example ``2012-11-15T16:19:00.094Z``. + + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memcache_v1beta2.types.Instance` A + Memorystore for Memcached instance + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, reschedule_type, schedule_time]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_memcache.RescheduleMaintenanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_memcache.RescheduleMaintenanceRequest): + request = cloud_memcache.RescheduleMaintenanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance is not None: + request.instance = instance + if reschedule_type is not None: + request.reschedule_type = reschedule_type + if schedule_time is not None: + request.schedule_time = schedule_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.reschedule_maintenance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("instance", request.instance),)), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. + return response + def __enter__(self): return self @@ -1526,6 +1688,331 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. 
+ + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py index a6e6a4a..67a6078 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py @@ -22,6 +22,7 @@ from google.api_core import retry as retries import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore import pkg_resources @@ -49,7 +50,7 @@ def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -168,6 +169,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=1200.0, client_info=client_info, ), + self.reschedule_maintenance: gapic_v1.method.wrap_method( + self.reschedule_maintenance, + default_timeout=1200.0, + client_info=client_info, + ), } def close(self): @@ -259,6 +265,69 @@ def apply_software_update( ]: raise NotImplementedError() + @property + def reschedule_maintenance( + self, + ) -> Callable[ + [cloud_memcache.RescheduleMaintenanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + 
[operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py index 4be1255..5035a2e 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py @@ -20,6 +20,7 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore @@ -65,14 +66,14 @@ def __init__( self, *, host: str = "memcache.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = 
None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -200,8 +201,8 @@ def __init__( def create_channel( cls, host: str = "memcache.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, @@ -481,9 +482,144 @@ def apply_software_update( ) return self._stubs["apply_software_update"] + @property + def reschedule_maintenance( + self, + ) -> Callable[ + [cloud_memcache.RescheduleMaintenanceRequest], operations_pb2.Operation + ]: + r"""Return a callable for the reschedule maintenance method over gRPC. + + Performs the apply phase of the RescheduleMaintenance + verb. + + Returns: + Callable[[~.RescheduleMaintenanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "reschedule_maintenance" not in self._stubs: + self._stubs["reschedule_maintenance"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1beta2.CloudMemcache/RescheduleMaintenance", + request_serializer=cloud_memcache.RescheduleMaintenanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["reschedule_maintenance"] + def close(self): self.grpc_channel.close() + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + @property def kind(self) -> str: return "grpc" diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py index d0310f8..68085b2 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py @@ -19,6 +19,7 @@ from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore @@ -67,7 +68,7 @@ class CloudMemcacheGrpcAsyncIOTransport(CloudMemcacheTransport): def create_channel( cls, host: str = "memcache.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -110,15 +111,15 @@ def __init__( self, *, host: str = "memcache.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: 
grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, @@ -499,8 +500,144 @@ def apply_software_update( ) return self._stubs["apply_software_update"] + @property + def reschedule_maintenance( + self, + ) -> Callable[ + [cloud_memcache.RescheduleMaintenanceRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the reschedule maintenance method over gRPC. + + Performs the apply phase of the RescheduleMaintenance + verb. + + Returns: + Callable[[~.RescheduleMaintenanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "reschedule_maintenance" not in self._stubs: + self._stubs["reschedule_maintenance"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1beta2.CloudMemcache/RescheduleMaintenance", + request_serializer=cloud_memcache.RescheduleMaintenanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["reschedule_maintenance"] + def close(self): return self.grpc_channel.close() + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[
+        [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
+    ]:
+        r"""Return a callable for the list locations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + __all__ = ("CloudMemcacheGrpcAsyncIOTransport",) diff --git a/google/cloud/memcache_v1beta2/types/__init__.py b/google/cloud/memcache_v1beta2/types/__init__.py index 073427d..5605887 100644 --- a/google/cloud/memcache_v1beta2/types/__init__.py +++ b/google/cloud/memcache_v1beta2/types/__init__.py @@ -23,11 +23,15 @@ ListInstancesRequest, ListInstancesResponse, LocationMetadata, + MaintenancePolicy, + MaintenanceSchedule, MemcacheParameters, MemcacheVersion, OperationMetadata, + RescheduleMaintenanceRequest, UpdateInstanceRequest, UpdateParametersRequest, + WeeklyMaintenanceWindow, ZoneMetadata, ) @@ -41,10 +45,14 @@ "ListInstancesRequest", "ListInstancesResponse", "LocationMetadata", + "MaintenancePolicy", + "MaintenanceSchedule", "MemcacheParameters", "OperationMetadata", + "RescheduleMaintenanceRequest", "UpdateInstanceRequest", "UpdateParametersRequest", + "WeeklyMaintenanceWindow", "ZoneMetadata", "MemcacheVersion", ) diff --git a/google/cloud/memcache_v1beta2/types/cloud_memcache.py b/google/cloud/memcache_v1beta2/types/cloud_memcache.py index b4b5c8c..6cd3f19 100644 --- a/google/cloud/memcache_v1beta2/types/cloud_memcache.py +++ b/google/cloud/memcache_v1beta2/types/cloud_memcache.py @@ -13,8 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( @@ -22,12 +27,16 @@ manifest={ "MemcacheVersion", "Instance", + "MaintenancePolicy", + "WeeklyMaintenanceWindow", + "MaintenanceSchedule", "ListInstancesRequest", "ListInstancesResponse", "GetInstanceRequest", "CreateInstanceRequest", "UpdateInstanceRequest", "DeleteInstanceRequest", + "RescheduleMaintenanceRequest", "ApplyParametersRequest", "UpdateParametersRequest", "ApplySoftwareUpdateRequest", @@ -65,7 +74,7 @@ class Instance(proto.Message): User provided name for the instance, which is only used for display purposes. Cannot be more than 80 characters. - labels (Mapping[str, str]): + labels (MutableMapping[str, str]): Resource labels to represent user-provided metadata. Refer to cloud documentation on labels for more details. @@ -75,7 +84,7 @@ class Instance(proto.Message): `network `__ to which the instance is connected. If left unspecified, the ``default`` network will be used. - zones (Sequence[str]): + zones (MutableSequence[str]): Zones in which Memcached nodes should be provisioned. Memcached nodes will be equally distributed across these zones. If not provided, @@ -93,9 +102,9 @@ class Instance(proto.Message): version will be automatically determined by our system based on the latest supported minor version. parameters (google.cloud.memcache_v1beta2.types.MemcacheParameters): - Optional: User defined parameters to apply to - the memcached process on each node. - memcache_nodes (Sequence[google.cloud.memcache_v1beta2.types.Instance.Node]): + User defined parameters to apply to the + memcached process on each node. 
+ memcache_nodes (MutableSequence[google.cloud.memcache_v1beta2.types.Instance.Node]): Output only. List of Memcached nodes. Refer to [Node][google.cloud.memcache.v1beta2.Instance.Node] message for more details. @@ -116,7 +125,7 @@ class Instance(proto.Message): MemcacheVersion. The full version format will be "memcached-1.5.16". - instance_messages (Sequence[google.cloud.memcache_v1beta2.types.Instance.InstanceMessage]): + instance_messages (MutableSequence[google.cloud.memcache_v1beta2.types.Instance.InstanceMessage]): List of messages that describe the current state of the Memcached instance. discovery_endpoint (str): @@ -124,6 +133,13 @@ class Instance(proto.Message): update_available (bool): Output only. Returns true if there is an update waiting to be applied + maintenance_policy (google.cloud.memcache_v1beta2.types.MaintenancePolicy): + The maintenance policy for the instance. If + not provided, the maintenance event will be + performed based on Memorystore internal rollout + schedule. + maintenance_schedule (google.cloud.memcache_v1beta2.types.MaintenanceSchedule): + Output only. Published maintenance schedule. """ class State(proto.Enum): @@ -131,6 +147,7 @@ class State(proto.Enum): STATE_UNSPECIFIED = 0 CREATING = 1 READY = 2 + UPDATING = 3 DELETING = 4 PERFORMING_MAINTENANCE = 5 @@ -145,11 +162,11 @@ class NodeConfig(proto.Message): Memcached node. 
""" - cpu_count = proto.Field( + cpu_count: int = proto.Field( proto.INT32, number=1, ) - memory_size_mb = proto.Field( + memory_size_mb: int = proto.Field( proto.INT32, number=2, ) @@ -191,33 +208,33 @@ class State(proto.Enum): DELETING = 3 UPDATING = 4 - node_id = proto.Field( + node_id: str = proto.Field( proto.STRING, number=1, ) - zone = proto.Field( + zone: str = proto.Field( proto.STRING, number=2, ) - state = proto.Field( + state: "Instance.Node.State" = proto.Field( proto.ENUM, number=3, enum="Instance.Node.State", ) - host = proto.Field( + host: str = proto.Field( proto.STRING, number=4, ) - port = proto.Field( + port: int = proto.Field( proto.INT32, number=5, ) - parameters = proto.Field( + parameters: "MemcacheParameters" = proto.Field( proto.MESSAGE, number=6, message="MemcacheParameters", ) - update_available = proto.Field( + update_available: bool = proto.Field( proto.BOOL, number=7, ) @@ -239,93 +256,210 @@ class Code(proto.Enum): CODE_UNSPECIFIED = 0 ZONE_DISTRIBUTION_UNBALANCED = 1 - code = proto.Field( + code: "Instance.InstanceMessage.Code" = proto.Field( proto.ENUM, number=1, enum="Instance.InstanceMessage.Code", ) - message = proto.Field( + message: str = proto.Field( proto.STRING, number=2, ) - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - display_name = proto.Field( + display_name: str = proto.Field( proto.STRING, number=2, ) - labels = proto.MapField( + labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=3, ) - authorized_network = proto.Field( + authorized_network: str = proto.Field( proto.STRING, number=4, ) - zones = proto.RepeatedField( + zones: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=5, ) - node_count = proto.Field( + node_count: int = proto.Field( proto.INT32, number=6, ) - node_config = proto.Field( + node_config: NodeConfig = proto.Field( proto.MESSAGE, number=7, message=NodeConfig, ) - memcache_version = proto.Field( + memcache_version: 
"MemcacheVersion" = proto.Field( proto.ENUM, number=9, enum="MemcacheVersion", ) - parameters = proto.Field( + parameters: "MemcacheParameters" = proto.Field( proto.MESSAGE, number=11, message="MemcacheParameters", ) - memcache_nodes = proto.RepeatedField( + memcache_nodes: MutableSequence[Node] = proto.RepeatedField( proto.MESSAGE, number=12, message=Node, ) - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=13, message=timestamp_pb2.Timestamp, ) - update_time = proto.Field( + update_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=14, message=timestamp_pb2.Timestamp, ) - state = proto.Field( + state: State = proto.Field( proto.ENUM, number=15, enum=State, ) - memcache_full_version = proto.Field( + memcache_full_version: str = proto.Field( proto.STRING, number=18, ) - instance_messages = proto.RepeatedField( + instance_messages: MutableSequence[InstanceMessage] = proto.RepeatedField( proto.MESSAGE, number=19, message=InstanceMessage, ) - discovery_endpoint = proto.Field( + discovery_endpoint: str = proto.Field( proto.STRING, number=20, ) - update_available = proto.Field( + update_available: bool = proto.Field( proto.BOOL, number=21, ) + maintenance_policy: "MaintenancePolicy" = proto.Field( + proto.MESSAGE, + number=22, + message="MaintenancePolicy", + ) + maintenance_schedule: "MaintenanceSchedule" = proto.Field( + proto.MESSAGE, + number=23, + message="MaintenanceSchedule", + ) + + +class MaintenancePolicy(proto.Message): + r"""Maintenance policy per instance. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the policy was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the policy was + updated. + description (str): + Description of what this policy is for. Create/Update + methods return INVALID_ARGUMENT if the length is greater + than 512. 
+ weekly_maintenance_window (MutableSequence[google.cloud.memcache_v1beta2.types.WeeklyMaintenanceWindow]): + Required. Maintenance window that is applied to resources + covered by this policy. Minimum 1. For the current version, + the maximum number of weekly_maintenance_windows is expected + to be one. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + weekly_maintenance_window: MutableSequence[ + "WeeklyMaintenanceWindow" + ] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="WeeklyMaintenanceWindow", + ) + + +class WeeklyMaintenanceWindow(proto.Message): + r"""Time window specified for weekly operations. + + Attributes: + day (google.type.dayofweek_pb2.DayOfWeek): + Required. Allows to define schedule that runs + specified day of the week. + start_time (google.type.timeofday_pb2.TimeOfDay): + Required. Start time of the window in UTC. + duration (google.protobuf.duration_pb2.Duration): + Required. Duration of the time window. + """ + + day: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=1, + enum=dayofweek_pb2.DayOfWeek, + ) + start_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=2, + message=timeofday_pb2.TimeOfDay, + ) + duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + + +class MaintenanceSchedule(proto.Message): + r"""Upcoming maintenance schedule. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The start time of any upcoming + scheduled maintenance for this instance. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The end time of any upcoming + scheduled maintenance for this instance. 
+ schedule_deadline_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The deadline that the + maintenance schedule start time can not go + beyond, including reschedule. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + schedule_deadline_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) class ListInstancesRequest(proto.Message): @@ -358,23 +492,23 @@ class ListInstancesRequest(proto.Message): "name desc" or "" (unsorted). """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=2, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=3, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=4, ) - order_by = proto.Field( + order_by: str = proto.Field( proto.STRING, number=5, ) @@ -385,7 +519,7 @@ class ListInstancesResponse(proto.Message): [ListInstances][google.cloud.memcache.v1beta2.CloudMemcache.ListInstances]. Attributes: - resources (Sequence[google.cloud.memcache_v1beta2.types.Instance]): + resources (MutableSequence[google.cloud.memcache_v1beta2.types.Instance]): A list of Memcached instances in the project in the specified location, or across all locations. @@ -396,7 +530,7 @@ class ListInstancesResponse(proto.Message): Token to retrieve the next page of results, or empty if there are no more results in the list. - unreachable (Sequence[str]): + unreachable (MutableSequence[str]): Locations that could not be reached. 
""" @@ -404,16 +538,16 @@ class ListInstancesResponse(proto.Message): def raw_page(self): return self - resources = proto.RepeatedField( + resources: MutableSequence["Instance"] = proto.RepeatedField( proto.MESSAGE, number=1, message="Instance", ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) - unreachable = proto.RepeatedField( + unreachable: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=3, ) @@ -430,7 +564,7 @@ class GetInstanceRequest(proto.Message): where ``location_id`` refers to a GCP region """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -462,15 +596,15 @@ class CreateInstanceRequest(proto.Message): Required. A Memcached [Instance] resource """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - instance_id = proto.Field( + instance_id: str = proto.Field( proto.STRING, number=2, ) - resource = proto.Field( + resource: "Instance" = proto.Field( proto.MESSAGE, number=3, message="Instance", @@ -491,12 +625,12 @@ class UpdateInstanceRequest(proto.Message): specified in update_mask are updated. """ - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=1, message=field_mask_pb2.FieldMask, ) - resource = proto.Field( + resource: "Instance" = proto.Field( proto.MESSAGE, number=2, message="Instance", @@ -514,12 +648,53 @@ class DeleteInstanceRequest(proto.Message): where ``location_id`` refers to a GCP region """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) +class RescheduleMaintenanceRequest(proto.Message): + r"""Request for + [RescheduleMaintenance][google.cloud.memcache.v1beta2.CloudMemcache.RescheduleMaintenance]. + + Attributes: + instance (str): + Required. Memcache instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. 
+ reschedule_type (google.cloud.memcache_v1beta2.types.RescheduleMaintenanceRequest.RescheduleType): + Required. If reschedule type is SPECIFIC_TIME, must set up + schedule_time as well. + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp when the maintenance shall be rescheduled to if + reschedule_type=SPECIFIC_TIME, in RFC 3339 format, for + example ``2012-11-15T16:19:00.094Z``. + """ + + class RescheduleType(proto.Enum): + r"""Reschedule options.""" + RESCHEDULE_TYPE_UNSPECIFIED = 0 + IMMEDIATE = 1 + NEXT_AVAILABLE_WINDOW = 2 + SPECIFIC_TIME = 3 + + instance: str = proto.Field( + proto.STRING, + number=1, + ) + reschedule_type: RescheduleType = proto.Field( + proto.ENUM, + number=2, + enum=RescheduleType, + ) + schedule_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + class ApplyParametersRequest(proto.Message): r"""Request for [ApplyParameters][google.cloud.memcache.v1beta2.CloudMemcache.ApplyParameters]. @@ -529,7 +704,7 @@ class ApplyParametersRequest(proto.Message): Required. Resource name of the Memcached instance for which parameter group updates should be applied. - node_ids (Sequence[str]): + node_ids (MutableSequence[str]): Nodes to which the instance-level parameter group is applied. apply_all (bool): @@ -539,15 +714,15 @@ class ApplyParametersRequest(proto.Message): within the instance. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - node_ids = proto.RepeatedField( + node_ids: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=2, ) - apply_all = proto.Field( + apply_all: bool = proto.Field( proto.BOOL, number=3, ) @@ -568,16 +743,16 @@ class UpdateParametersRequest(proto.Message): The parameters to apply to the instance. 
""" - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, ) - parameters = proto.Field( + parameters: "MemcacheParameters" = proto.Field( proto.MESSAGE, number=3, message="MemcacheParameters", @@ -593,7 +768,7 @@ class ApplySoftwareUpdateRequest(proto.Message): Required. Resource name of the Memcached instance for which software update should be applied. - node_ids (Sequence[str]): + node_ids (MutableSequence[str]): Nodes to which we should apply the update to. Note all the selected nodes are updated in parallel. @@ -605,40 +780,42 @@ class ApplySoftwareUpdateRequest(proto.Message): instance. """ - instance = proto.Field( + instance: str = proto.Field( proto.STRING, number=1, ) - node_ids = proto.RepeatedField( + node_ids: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=2, ) - apply_all = proto.Field( + apply_all: bool = proto.Field( proto.BOOL, number=3, ) class MemcacheParameters(proto.Message): - r"""The unique ID associated with this set of parameters. Users - can use this id to determine if the parameters associated with - the instance differ from the parameters associated with the - nodes. A discrepancy between parameter ids can inform users that - they may need to take action to apply parameters on nodes. + r""" Attributes: id (str): - Output only. - params (Mapping[str, str]): + Output only. The unique ID associated with + this set of parameters. Users can use this id to + determine if the parameters associated with the + instance differ from the parameters associated + with the nodes. A discrepancy between parameter + ids can inform users that they may need to take + action to apply parameters on nodes. + params (MutableMapping[str, str]): User defined set of parameters to use in the memcached process. 
""" - id = proto.Field( + id: str = proto.Field( proto.STRING, number=1, ) - params = proto.MapField( + params: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=3, @@ -675,33 +852,33 @@ class OperationMetadata(proto.Message): operation. """ - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp, ) - end_time = proto.Field( + end_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp, ) - target = proto.Field( + target: str = proto.Field( proto.STRING, number=3, ) - verb = proto.Field( + verb: str = proto.Field( proto.STRING, number=4, ) - status_detail = proto.Field( + status_detail: str = proto.Field( proto.STRING, number=5, ) - cancel_requested = proto.Field( + cancel_requested: bool = proto.Field( proto.BOOL, number=6, ) - api_version = proto.Field( + api_version: str = proto.Field( proto.STRING, number=7, ) @@ -712,14 +889,14 @@ class LocationMetadata(proto.Message): [google.cloud.location.Location][google.cloud.location.Location]. Attributes: - available_zones (Mapping[str, google.cloud.memcache_v1beta2.types.ZoneMetadata]): + available_zones (MutableMapping[str, google.cloud.memcache_v1beta2.types.ZoneMetadata]): Output only. The set of available zones in the location. The map is keyed by the lowercase ID of each zone, as defined by GCE. These keys can be specified in the ``zones`` field when creating a Memcached instance. """ - available_zones = proto.MapField( + available_zones: MutableMapping[str, "ZoneMetadata"] = proto.MapField( proto.STRING, proto.MESSAGE, number=1, diff --git a/owlbot.py b/owlbot.py new file mode 100644 index 0000000..ce738f0 --- /dev/null +++ b/owlbot.py @@ -0,0 +1,56 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +from pathlib import Path +import shutil + +import synthtool as s +import synthtool.gcp as gcp +from synthtool.languages import python + +# ---------------------------------------------------------------------------- +# Copy the generated client from the owl-bot staging directory +# ---------------------------------------------------------------------------- + +clean_up_generated_samples = True + +# Load the default version defined in .repo-metadata.json. +default_version = json.load(open(".repo-metadata.json", "rt")).get( + "default_version" +) + +for library in s.get_staging_dirs(default_version): + if clean_up_generated_samples: + shutil.rmtree("samples/generated_samples", ignore_errors=True) + clean_up_generated_samples = False + s.move([library], excludes=["**/gapic_version.py"]) +s.remove_staging_dirs() + +# ---------------------------------------------------------------------------- +# Add templated files +# ---------------------------------------------------------------------------- + +templated_files = gcp.CommonTemplates().py_library( + cov_level=100, + microgenerator=True, + versions=gcp.common.detect_versions(path="./google", default_first=True), +) +s.move(templated_files, excludes=[".coveragerc", ".github/release-please.yml"]) + +python.py_samples(skip_readmes=True) + +# run format session for all directories which have a noxfile +for noxfile in Path(".").glob("**/noxfile.py"): + s.shell.run(["nox", "-s", "format"], cwd=noxfile.parent, hide_output=False) diff --git a/release-please-config.json b/release-please-config.json new 
file mode 100644 index 0000000..1c8d7b9 --- /dev/null +++ b/release-please-config.json @@ -0,0 +1,28 @@ +{ + "$schema": "https://raw.githubusercontent.com/googleapis/release-please/main/schemas/config.json", + "packages": { + ".": { + "release-type": "python", + "extra-files": [ + "google/cloud/memcache/gapic_version.py", + { + "type": "json", + "path": "samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json", + "jsonpath": "$.clientLibrary.version" + }, + { + "type": "json", + "path": "samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json", + "jsonpath": "$.clientLibrary.version" + } + ] + } + }, + "release-type": "python", + "plugins": [ + { + "type": "sentence-case" + } + ], + "initial-version": "0.1.0" +} diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_reschedule_maintenance_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_reschedule_maintenance_async.py new file mode 100644 index 0000000..1ebd88f --- /dev/null +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_reschedule_maintenance_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RescheduleMaintenance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_v1_generated_CloudMemcache_RescheduleMaintenance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import memcache_v1 + + +async def sample_reschedule_maintenance(): + # Create a client + client = memcache_v1.CloudMemcacheAsyncClient() + + # Initialize request argument(s) + request = memcache_v1.RescheduleMaintenanceRequest( + instance="instance_value", + reschedule_type="SPECIFIC_TIME", + ) + + # Make the request + operation = client.reschedule_maintenance(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END memcache_v1_generated_CloudMemcache_RescheduleMaintenance_async] diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_reschedule_maintenance_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_reschedule_maintenance_sync.py new file mode 100644 index 0000000..821c80e --- /dev/null +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_reschedule_maintenance_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RescheduleMaintenance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_v1_generated_CloudMemcache_RescheduleMaintenance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import memcache_v1 + + +def sample_reschedule_maintenance(): + # Create a client + client = memcache_v1.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1.RescheduleMaintenanceRequest( + instance="instance_value", + reschedule_type="SPECIFIC_TIME", + ) + + # Make the request + operation = client.reschedule_maintenance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END memcache_v1_generated_CloudMemcache_RescheduleMaintenance_sync] diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_async.py new file mode 100644 index 0000000..867988e --- /dev/null +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RescheduleMaintenance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_v1beta2_generated_CloudMemcache_RescheduleMaintenance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import memcache_v1beta2 + + +async def sample_reschedule_maintenance(): + # Create a client + client = memcache_v1beta2.CloudMemcacheAsyncClient() + + # Initialize request argument(s) + request = memcache_v1beta2.RescheduleMaintenanceRequest( + instance="instance_value", + reschedule_type="SPECIFIC_TIME", + ) + + # Make the request + operation = client.reschedule_maintenance(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END memcache_v1beta2_generated_CloudMemcache_RescheduleMaintenance_async] diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_sync.py new file mode 100644 index 0000000..20b2f08 --- /dev/null +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RescheduleMaintenance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memcache + + +# [START memcache_v1beta2_generated_CloudMemcache_RescheduleMaintenance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import memcache_v1beta2 + + +def sample_reschedule_maintenance(): + # Create a client + client = memcache_v1beta2.CloudMemcacheClient() + + # Initialize request argument(s) + request = memcache_v1beta2.RescheduleMaintenanceRequest( + instance="instance_value", + reschedule_type="SPECIFIC_TIME", + ) + + # Make the request + operation = client.reschedule_maintenance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END memcache_v1beta2_generated_CloudMemcache_RescheduleMaintenance_sync] diff --git a/samples/generated_samples/snippet_metadata_memcache_v1.json b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json similarity index 86% rename from samples/generated_samples/snippet_metadata_memcache_v1.json rename to samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json index 52b01b4..4fbe6f0 100644 --- a/samples/generated_samples/snippet_metadata_memcache_v1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json @@ -7,7 +7,8 @@ } ], "language": "PYTHON", - "name": "google-cloud-memcache" + "name": "google-cloud-memcache", + "version": "0.1.0" }, "snippets": [ { @@ -38,7 +39,7 @@ }, { "name": "node_ids", - "type": "Sequence[str]" + "type": "MutableSequence[str]" }, { "name": "apply_all", @@ -126,7 +127,7 @@ }, { "name": "node_ids", - "type": "Sequence[str]" + "type": "MutableSequence[str]" }, { "name": "apply_all", @@ -847,6 +848,183 @@ ], "title": "memcache_v1_generated_cloud_memcache_list_instances_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient", + "shortName": "CloudMemcacheAsyncClient" + }, + "fullName": 
"google.cloud.memcache_v1.CloudMemcacheAsyncClient.reschedule_maintenance", + "method": { + "fullName": "google.cloud.memcache.v1.CloudMemcache.RescheduleMaintenance", + "service": { + "fullName": "google.cloud.memcache.v1.CloudMemcache", + "shortName": "CloudMemcache" + }, + "shortName": "RescheduleMaintenance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1.types.RescheduleMaintenanceRequest" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "reschedule_type", + "type": "google.cloud.memcache_v1.types.RescheduleMaintenanceRequest.RescheduleType" + }, + { + "name": "schedule_time", + "type": "google.protobuf.timestamp_pb2.Timestamp" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "reschedule_maintenance" + }, + "description": "Sample for RescheduleMaintenance", + "file": "memcache_v1_generated_cloud_memcache_reschedule_maintenance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "memcache_v1_generated_CloudMemcache_RescheduleMaintenance_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "memcache_v1_generated_cloud_memcache_reschedule_maintenance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.memcache_v1.CloudMemcacheClient", + "shortName": "CloudMemcacheClient" + }, + "fullName": 
"google.cloud.memcache_v1.CloudMemcacheClient.reschedule_maintenance", + "method": { + "fullName": "google.cloud.memcache.v1.CloudMemcache.RescheduleMaintenance", + "service": { + "fullName": "google.cloud.memcache.v1.CloudMemcache", + "shortName": "CloudMemcache" + }, + "shortName": "RescheduleMaintenance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1.types.RescheduleMaintenanceRequest" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "reschedule_type", + "type": "google.cloud.memcache_v1.types.RescheduleMaintenanceRequest.RescheduleType" + }, + { + "name": "schedule_time", + "type": "google.protobuf.timestamp_pb2.Timestamp" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "reschedule_maintenance" + }, + "description": "Sample for RescheduleMaintenance", + "file": "memcache_v1_generated_cloud_memcache_reschedule_maintenance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "memcache_v1_generated_CloudMemcache_RescheduleMaintenance_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "memcache_v1_generated_cloud_memcache_reschedule_maintenance_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/samples/generated_samples/snippet_metadata_memcache_v1beta2.json b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json similarity index 87% rename from 
samples/generated_samples/snippet_metadata_memcache_v1beta2.json rename to samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json index e67f9f6..a19a59a 100644 --- a/samples/generated_samples/snippet_metadata_memcache_v1beta2.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json @@ -7,7 +7,8 @@ } ], "language": "PYTHON", - "name": "google-cloud-memcache" + "name": "google-cloud-memcache", + "version": "0.1.0" }, "snippets": [ { @@ -38,7 +39,7 @@ }, { "name": "node_ids", - "type": "Sequence[str]" + "type": "MutableSequence[str]" }, { "name": "apply_all", @@ -126,7 +127,7 @@ }, { "name": "node_ids", - "type": "Sequence[str]" + "type": "MutableSequence[str]" }, { "name": "apply_all", @@ -215,7 +216,7 @@ }, { "name": "node_ids", - "type": "Sequence[str]" + "type": "MutableSequence[str]" }, { "name": "apply_all", @@ -303,7 +304,7 @@ }, { "name": "node_ids", - "type": "Sequence[str]" + "type": "MutableSequence[str]" }, { "name": "apply_all", @@ -1024,6 +1025,183 @@ ], "title": "memcache_v1beta2_generated_cloud_memcache_list_instances_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient", + "shortName": "CloudMemcacheAsyncClient" + }, + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient.reschedule_maintenance", + "method": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.RescheduleMaintenance", + "service": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", + "shortName": "CloudMemcache" + }, + "shortName": "RescheduleMaintenance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1beta2.types.RescheduleMaintenanceRequest" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "reschedule_type", + "type": "google.cloud.memcache_v1beta2.types.RescheduleMaintenanceRequest.RescheduleType" + }, + { + "name": "schedule_time", + "type": 
"google.protobuf.timestamp_pb2.Timestamp" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "reschedule_maintenance" + }, + "description": "Sample for RescheduleMaintenance", + "file": "memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "memcache_v1beta2_generated_CloudMemcache_RescheduleMaintenance_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient", + "shortName": "CloudMemcacheClient" + }, + "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient.reschedule_maintenance", + "method": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.RescheduleMaintenance", + "service": { + "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", + "shortName": "CloudMemcache" + }, + "shortName": "RescheduleMaintenance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memcache_v1beta2.types.RescheduleMaintenanceRequest" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "reschedule_type", + "type": "google.cloud.memcache_v1beta2.types.RescheduleMaintenanceRequest.RescheduleType" + }, + { + "name": "schedule_time", + "type": 
"google.protobuf.timestamp_pb2.Timestamp" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "reschedule_maintenance" + }, + "description": "Sample for RescheduleMaintenance", + "file": "memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "memcache_v1beta2_generated_CloudMemcache_RescheduleMaintenance_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/scripts/fixup_memcache_v1_keywords.py b/scripts/fixup_memcache_v1_keywords.py index 84308d4..de61ce0 100644 --- a/scripts/fixup_memcache_v1_keywords.py +++ b/scripts/fixup_memcache_v1_keywords.py @@ -44,6 +44,7 @@ class memcacheCallTransformer(cst.CSTTransformer): 'delete_instance': ('name', ), 'get_instance': ('name', ), 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'reschedule_maintenance': ('instance', 'reschedule_type', 'schedule_time', ), 'update_instance': ('update_mask', 'instance', ), 'update_parameters': ('name', 'update_mask', 'parameters', ), } diff --git a/scripts/fixup_memcache_v1beta2_keywords.py b/scripts/fixup_memcache_v1beta2_keywords.py index 4b5aa32..b082016 100644 --- a/scripts/fixup_memcache_v1beta2_keywords.py +++ b/scripts/fixup_memcache_v1beta2_keywords.py @@ -45,6 
+45,7 @@ class memcacheCallTransformer(cst.CSTTransformer): 'delete_instance': ('name', ), 'get_instance': ('name', ), 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'reschedule_maintenance': ('instance', 'reschedule_type', 'schedule_time', ), 'update_instance': ('update_mask', 'resource', ), 'update_parameters': ('name', 'update_mask', 'parameters', ), } diff --git a/setup.py b/setup.py index 97ad523..cad6f1c 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- - -# Copyright (C) 2019 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,18 +13,30 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import io import os -import setuptools +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) name = "google-cloud-memcache" -description = "Memorystore for Memcached API client library" -version = "1.4.4" -release_status = "Development Status :: 5 - Production/Stable" + + +description = "Google Cloud Memcache API client library" + +version = {} +with open(os.path.join(package_root, "google/cloud/memcache/gapic_version.py")) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + dependencies = [ - "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", + "google-api-core[grpc] >= 1.33.2, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", "proto-plus >= 1.22.0, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] @@ -47,7 +58,6 @@ if "google.cloud" in packages: 
namespaces.append("google.cloud") - setuptools.setup( name=name, version=version, diff --git a/testing/constraints-3.10.txt b/testing/constraints-3.10.txt index e69de29..ed7f9ae 100644 --- a/testing/constraints-3.10.txt +++ b/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt index e69de29..ed7f9ae 100644 --- a/testing/constraints-3.11.txt +++ b/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index 4005dc5..6f3158c 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -4,6 +4,6 @@ # Pin the version to the lower bound. # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.32.0 +google-api-core==1.33.2 proto-plus==1.22.0 protobuf==3.19.5 diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt index da93009..ed7f9ae 100644 --- a/testing/constraints-3.8.txt +++ b/testing/constraints-3.8.txt @@ -1,2 +1,6 @@ -# This constraints file is left inentionally empty -# so the latest version of dependencies is installed \ No newline at end of file +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt index da93009..ed7f9ae 100644 --- a/testing/constraints-3.9.txt +++ b/testing/constraints-3.9.txt @@ -1,2 +1,6 @@ -# This constraints file is left inentionally empty -# so the latest version of dependencies is installed \ No newline at end of file +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py index 0817bd9..6c9e884 100644 --- a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -39,10 +39,15 @@ import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 from google.longrunning import operations_pb2 from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers @@ -2594,6 +2599,267 @@ async def test_apply_parameters_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.RescheduleMaintenanceRequest, + dict, + ], +) +def test_reschedule_maintenance(request_type, transport: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual 
API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.reschedule_maintenance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.RescheduleMaintenanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_reschedule_maintenance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + client.reschedule_maintenance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.RescheduleMaintenanceRequest() + + +@pytest.mark.asyncio +async def test_reschedule_maintenance_async( + transport: str = "grpc_asyncio", + request_type=cloud_memcache.RescheduleMaintenanceRequest, +): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.reschedule_maintenance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.RescheduleMaintenanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_reschedule_maintenance_async_from_dict(): + await test_reschedule_maintenance_async(request_type=dict) + + +def test_reschedule_maintenance_field_headers(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.RescheduleMaintenanceRequest() + + request.instance = "instance_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.reschedule_maintenance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "instance=instance_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_reschedule_maintenance_field_headers_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.RescheduleMaintenanceRequest() + + request.instance = "instance_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.reschedule_maintenance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "instance=instance_value", + ) in kw["metadata"] + + +def test_reschedule_maintenance_flattened(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.reschedule_maintenance( + instance="instance_value", + reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].instance + mock_val = "instance_value" + assert arg == mock_val + arg = args[0].reschedule_type + mock_val = cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE + assert arg == mock_val + assert TimestampRule().to_proto( + args[0].schedule_time + ) == timestamp_pb2.Timestamp(seconds=751) + + +def test_reschedule_maintenance_flattened_error(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.reschedule_maintenance( + cloud_memcache.RescheduleMaintenanceRequest(), + instance="instance_value", + reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + +@pytest.mark.asyncio +async def test_reschedule_maintenance_flattened_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.reschedule_maintenance( + instance="instance_value", + reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].instance + mock_val = "instance_value" + assert arg == mock_val + arg = args[0].reschedule_type + mock_val = cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE + assert arg == mock_val + assert TimestampRule().to_proto( + args[0].schedule_time + ) == timestamp_pb2.Timestamp(seconds=751) + + +@pytest.mark.asyncio +async def test_reschedule_maintenance_flattened_error_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.reschedule_maintenance( + cloud_memcache.RescheduleMaintenanceRequest(), + instance="instance_value", + reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.CloudMemcacheGrpcTransport( @@ -2738,6 +3004,13 @@ def test_cloud_memcache_base_transport(): "update_parameters", "delete_instance", "apply_parameters", + "reschedule_maintenance", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -3286,6 +3559,860 @@ async def test_transport_close_async(): close.assert_called_once() +def test_delete_operation(transport: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc"): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+    client = CloudMemcacheAsyncClient(credentials=ga_credentials.AnonymousCredentials())
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+def test_get_location_from_dict():
+    client = CloudMemcacheClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = CloudMemcacheAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_transport_close(): transports = { "grpc": "_grpc_channel", diff --git a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index c45ad19..8fe7f1d 100644 --- a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -39,10 +39,15 @@ import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 from google.longrunning import operations_pb2 from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers @@ -2859,6 +2864,267 @@ async def test_apply_software_update_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.RescheduleMaintenanceRequest, + dict, + ], +) +def test_reschedule_maintenance(request_type, transport: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.reschedule_maintenance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.RescheduleMaintenanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_reschedule_maintenance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + client.reschedule_maintenance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.RescheduleMaintenanceRequest() + + +@pytest.mark.asyncio +async def test_reschedule_maintenance_async( + transport: str = "grpc_asyncio", + request_type=cloud_memcache.RescheduleMaintenanceRequest, +): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.reschedule_maintenance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_memcache.RescheduleMaintenanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_reschedule_maintenance_async_from_dict(): + await test_reschedule_maintenance_async(request_type=dict) + + +def test_reschedule_maintenance_field_headers(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.RescheduleMaintenanceRequest() + + request.instance = "instance_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.reschedule_maintenance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "instance=instance_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_reschedule_maintenance_field_headers_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = cloud_memcache.RescheduleMaintenanceRequest() + + request.instance = "instance_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.reschedule_maintenance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "instance=instance_value", + ) in kw["metadata"] + + +def test_reschedule_maintenance_flattened(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.reschedule_maintenance( + instance="instance_value", + reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].instance + mock_val = "instance_value" + assert arg == mock_val + arg = args[0].reschedule_type + mock_val = cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE + assert arg == mock_val + assert TimestampRule().to_proto( + args[0].schedule_time + ) == timestamp_pb2.Timestamp(seconds=751) + + +def test_reschedule_maintenance_flattened_error(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.reschedule_maintenance( + cloud_memcache.RescheduleMaintenanceRequest(), + instance="instance_value", + reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + +@pytest.mark.asyncio +async def test_reschedule_maintenance_flattened_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.reschedule_maintenance( + instance="instance_value", + reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].instance + mock_val = "instance_value" + assert arg == mock_val + arg = args[0].reschedule_type + mock_val = cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE + assert arg == mock_val + assert TimestampRule().to_proto( + args[0].schedule_time + ) == timestamp_pb2.Timestamp(seconds=751) + + +@pytest.mark.asyncio +async def test_reschedule_maintenance_flattened_error_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.reschedule_maintenance( + cloud_memcache.RescheduleMaintenanceRequest(), + instance="instance_value", + reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.CloudMemcacheGrpcTransport( @@ -3004,6 +3270,13 @@ def test_cloud_memcache_base_transport(): "delete_instance", "apply_parameters", "apply_software_update", + "reschedule_maintenance", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -3552,6 +3825,860 @@ async def test_transport_close_async(): close.assert_called_once() +def test_delete_operation(transport: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc"): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = CloudMemcacheAsyncClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +def test_get_location_from_dict(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = locations_pb2.Location() + + response = client.get_location( + request={ + "name": "locations/abc", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_location_from_dict_async(): + client = CloudMemcacheAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_transport_close(): transports = { "grpc": "_grpc_channel", From b1f7a36fa9649dcd345220f692c29f676d858cdc Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 8 Dec 2022 10:26:06 -0500 Subject: [PATCH 138/159] fix(deps): Require google-api-core >=1.34.0, >=2.11.0 (#231) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix(deps): Require google-api-core >=1.34.0, >=2.11.0 fix: Drop usage of pkg_resources fix: Fix timeout default values docs(samples): Snippetgen should call await on the operation coroutine before calling result PiperOrigin-RevId: 493260409 Source-Link: https://github.com/googleapis/googleapis/commit/fea43879f83a8d0dacc9353b3f75f8f46d37162f Source-Link: https://github.com/googleapis/googleapis-gen/commit/387b7344c7529ee44be84e613b19a820508c612b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMzg3YjczNDRjNzUyOWVlNDRiZTg0ZTYxM2IxOWE4MjA1MDhjNjEyYiJ9 * 🩉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * add gapic_version.py * add gapic_version.py Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .coveragerc | 5 -- 
google/cloud/memcache_v1/gapic_version.py | 16 +++++ .../services/cloud_memcache/async_client.py | 54 ++++++++--------- .../services/cloud_memcache/client.py | 42 ++++++-------- .../cloud_memcache/transports/base.py | 13 ++--- .../cloud/memcache_v1beta2/gapic_version.py | 16 +++++ .../services/cloud_memcache/async_client.py | 58 +++++++++---------- .../services/cloud_memcache/client.py | 44 +++++++------- .../cloud_memcache/transports/base.py | 13 ++--- release-please-config.json | 2 + ...d_cloud_memcache_apply_parameters_async.py | 2 +- ...ed_cloud_memcache_create_instance_async.py | 2 +- ...ed_cloud_memcache_delete_instance_async.py | 2 +- ...d_memcache_reschedule_maintenance_async.py | 2 +- ...ed_cloud_memcache_update_instance_async.py | 2 +- ..._cloud_memcache_update_parameters_async.py | 2 +- ...d_cloud_memcache_apply_parameters_async.py | 2 +- ...ud_memcache_apply_software_update_async.py | 2 +- ...ed_cloud_memcache_create_instance_async.py | 2 +- ...ed_cloud_memcache_delete_instance_async.py | 2 +- ...d_memcache_reschedule_maintenance_async.py | 2 +- ...ed_cloud_memcache_update_instance_async.py | 2 +- ..._cloud_memcache_update_parameters_async.py | 2 +- setup.py | 2 +- testing/constraints-3.7.txt | 2 +- 25 files changed, 148 insertions(+), 145 deletions(-) create mode 100644 google/cloud/memcache_v1/gapic_version.py create mode 100644 google/cloud/memcache_v1beta2/gapic_version.py diff --git a/.coveragerc b/.coveragerc index 028e2ca..c090f54 100644 --- a/.coveragerc +++ b/.coveragerc @@ -10,8 +10,3 @@ exclude_lines = pragma: NO COVER # Ignore debug-only repr def __repr__ - # Ignore pkg_resources exceptions. - # This is added at the module level as a safeguard for if someone - # generates the code and tries to run it without pip installing. This - # makes it virtually impossible to test properly. 
- except pkg_resources.DistributionNotFound diff --git a/google/cloud/memcache_v1/gapic_version.py b/google/cloud/memcache_v1/gapic_version.py new file mode 100644 index 0000000..25e4dd6 --- /dev/null +++ b/google/cloud/memcache_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "1.4.4" # {x-release-please-version} diff --git a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py index 6dbb0c9..819dc9a 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py @@ -34,7 +34,8 @@ from google.api_core.client_options import ClientOptions from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -import pkg_resources + +from google.cloud.memcache_v1 import gapic_version as package_version try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] @@ -246,7 +247,7 @@ async def list_instances( *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesAsyncPager: r"""Lists Instances in a given location. 
@@ -363,7 +364,7 @@ async def get_instance( *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> cloud_memcache.Instance: r"""Gets details of a single Instance. @@ -467,7 +468,7 @@ async def create_instance( instance: Optional[cloud_memcache.Instance] = None, instance_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Creates a new Instance in a given location. @@ -505,7 +506,7 @@ async def sample_create_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -621,7 +622,7 @@ async def update_instance( instance: Optional[cloud_memcache.Instance] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Updates an existing Instance in a given project and @@ -658,7 +659,7 @@ async def sample_update_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -759,7 +760,7 @@ async def update_parameters( update_mask: Optional[field_mask_pb2.FieldMask] = None, parameters: Optional[cloud_memcache.MemcacheParameters] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: 
r"""Updates the defined Memcached parameters for an existing @@ -792,7 +793,7 @@ async def sample_update_parameters(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -896,7 +897,7 @@ async def delete_instance( *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Deletes a single Instance. @@ -926,7 +927,7 @@ async def sample_delete_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -1024,7 +1025,7 @@ async def apply_parameters( node_ids: Optional[MutableSequence[str]] = None, apply_all: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""``ApplyParameters`` restarts the set of specified nodes in order @@ -1056,7 +1057,7 @@ async def sample_apply_parameters(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -1170,7 +1171,7 @@ async def reschedule_maintenance( ] = None, schedule_time: Optional[timestamp_pb2.Timestamp] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Reschedules upcoming maintenance event. 
@@ -1201,7 +1202,7 @@ async def sample_reschedule_maintenance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -1308,7 +1309,7 @@ async def list_operations( request: Optional[operations_pb2.ListOperationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -1362,7 +1363,7 @@ async def get_operation( request: Optional[operations_pb2.GetOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1416,7 +1417,7 @@ async def delete_operation( request: Optional[operations_pb2.DeleteOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a long-running operation. @@ -1471,7 +1472,7 @@ async def cancel_operation( request: Optional[operations_pb2.CancelOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. 
@@ -1525,7 +1526,7 @@ async def get_location( request: Optional[locations_pb2.GetLocationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.Location: r"""Gets information about a location. @@ -1579,7 +1580,7 @@ async def list_locations( request: Optional[locations_pb2.ListLocationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. @@ -1635,14 +1636,9 @@ async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-memcache", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("CloudMemcacheAsyncClient",) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py index b27b5a5..d00e4f2 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -38,7 +38,8 @@ from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.oauth2 import service_account # type: ignore -import pkg_resources + +from google.cloud.memcache_v1 import gapic_version as package_version try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] @@ -471,7 +472,7 @@ def list_instances( *, parent: Optional[str] = 
None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesPager: r"""Lists Instances in a given location. @@ -588,7 +589,7 @@ def get_instance( *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> cloud_memcache.Instance: r"""Gets details of a single Instance. @@ -692,7 +693,7 @@ def create_instance( instance: Optional[cloud_memcache.Instance] = None, instance_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Creates a new Instance in a given location. @@ -846,7 +847,7 @@ def update_instance( instance: Optional[cloud_memcache.Instance] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Updates an existing Instance in a given project and @@ -984,7 +985,7 @@ def update_parameters( update_mask: Optional[field_mask_pb2.FieldMask] = None, parameters: Optional[cloud_memcache.MemcacheParameters] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Updates the defined Memcached parameters for an existing @@ -1121,7 +1122,7 @@ def delete_instance( *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: 
Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Deletes a single Instance. @@ -1249,7 +1250,7 @@ def apply_parameters( node_ids: Optional[MutableSequence[str]] = None, apply_all: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""``ApplyParameters`` restarts the set of specified nodes in order @@ -1395,7 +1396,7 @@ def reschedule_maintenance( ] = None, schedule_time: Optional[timestamp_pb2.Timestamp] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Reschedules upcoming maintenance event. @@ -1546,7 +1547,7 @@ def list_operations( request: Optional[operations_pb2.ListOperationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -1600,7 +1601,7 @@ def get_operation( request: Optional[operations_pb2.GetOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. 
@@ -1654,7 +1655,7 @@ def delete_operation( request: Optional[operations_pb2.DeleteOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a long-running operation. @@ -1709,7 +1710,7 @@ def cancel_operation( request: Optional[operations_pb2.CancelOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -1763,7 +1764,7 @@ def get_location( request: Optional[locations_pb2.GetLocationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.Location: r"""Gets information about a location. @@ -1817,7 +1818,7 @@ def list_locations( request: Optional[locations_pb2.ListLocationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. 
@@ -1867,14 +1868,9 @@ def list_locations( return response -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-memcache", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("CloudMemcacheClient",) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py index 154e04c..3f787c2 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py @@ -25,18 +25,13 @@ from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore -import pkg_resources +from google.cloud.memcache_v1 import gapic_version as package_version from google.cloud.memcache_v1.types import cloud_memcache -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-memcache", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class CloudMemcacheTransport(abc.ABC): diff --git a/google/cloud/memcache_v1beta2/gapic_version.py b/google/cloud/memcache_v1beta2/gapic_version.py new file mode 100644 index 0000000..25e4dd6 --- /dev/null +++ b/google/cloud/memcache_v1beta2/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "1.4.4" # {x-release-please-version} diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py index 0f31f71..f3f032f 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py @@ -34,7 +34,8 @@ from google.api_core.client_options import ClientOptions from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -import pkg_resources + +from google.cloud.memcache_v1beta2 import gapic_version as package_version try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] @@ -246,7 +247,7 @@ async def list_instances( *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesAsyncPager: r"""Lists Instances in a given location. @@ -363,7 +364,7 @@ async def get_instance( *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> cloud_memcache.Instance: r"""Gets details of a single Instance. 
@@ -467,7 +468,7 @@ async def create_instance( instance_id: Optional[str] = None, resource: Optional[cloud_memcache.Instance] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Creates a new Instance in a given location. @@ -505,7 +506,7 @@ async def sample_create_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -621,7 +622,7 @@ async def update_instance( update_mask: Optional[field_mask_pb2.FieldMask] = None, resource: Optional[cloud_memcache.Instance] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Updates an existing Instance in a given project and @@ -658,7 +659,7 @@ async def sample_update_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -759,7 +760,7 @@ async def update_parameters( update_mask: Optional[field_mask_pb2.FieldMask] = None, parameters: Optional[cloud_memcache.MemcacheParameters] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Updates the defined Memcached parameters for an existing @@ -792,7 +793,7 @@ async def sample_update_parameters(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -896,7 +897,7 @@ async def delete_instance( *, name: 
Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Deletes a single Instance. @@ -926,7 +927,7 @@ async def sample_delete_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -1024,7 +1025,7 @@ async def apply_parameters( node_ids: Optional[MutableSequence[str]] = None, apply_all: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""``ApplyParameters`` restarts the set of specified nodes in order @@ -1056,7 +1057,7 @@ async def sample_apply_parameters(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -1168,7 +1169,7 @@ async def apply_software_update( node_ids: Optional[MutableSequence[str]] = None, apply_all: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Updates software on the selected nodes of the @@ -1199,7 +1200,7 @@ async def sample_apply_software_update(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -1316,7 +1317,7 @@ async def reschedule_maintenance( ] = None, schedule_time: Optional[timestamp_pb2.Timestamp] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: 
Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Performs the apply phase of the RescheduleMaintenance @@ -1348,7 +1349,7 @@ async def sample_reschedule_maintenance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -1455,7 +1456,7 @@ async def list_operations( request: Optional[operations_pb2.ListOperationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -1509,7 +1510,7 @@ async def get_operation( request: Optional[operations_pb2.GetOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1563,7 +1564,7 @@ async def delete_operation( request: Optional[operations_pb2.DeleteOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a long-running operation. @@ -1618,7 +1619,7 @@ async def cancel_operation( request: Optional[operations_pb2.CancelOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. 
@@ -1672,7 +1673,7 @@ async def get_location( request: Optional[locations_pb2.GetLocationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.Location: r"""Gets information about a location. @@ -1726,7 +1727,7 @@ async def list_locations( request: Optional[locations_pb2.ListLocationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. @@ -1782,14 +1783,9 @@ async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-memcache", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("CloudMemcacheAsyncClient",) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index 936ea9a..4bea8ce 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -38,7 +38,8 @@ from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.oauth2 import service_account # type: ignore -import pkg_resources + +from google.cloud.memcache_v1beta2 import gapic_version as package_version try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] @@ -471,7 +472,7 @@ def list_instances( *, 
parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesPager: r"""Lists Instances in a given location. @@ -588,7 +589,7 @@ def get_instance( *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> cloud_memcache.Instance: r"""Gets details of a single Instance. @@ -692,7 +693,7 @@ def create_instance( instance_id: Optional[str] = None, resource: Optional[cloud_memcache.Instance] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Creates a new Instance in a given location. @@ -846,7 +847,7 @@ def update_instance( update_mask: Optional[field_mask_pb2.FieldMask] = None, resource: Optional[cloud_memcache.Instance] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Updates an existing Instance in a given project and @@ -984,7 +985,7 @@ def update_parameters( update_mask: Optional[field_mask_pb2.FieldMask] = None, parameters: Optional[cloud_memcache.MemcacheParameters] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Updates the defined Memcached parameters for an existing @@ -1121,7 +1122,7 @@ def delete_instance( *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = 
None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Deletes a single Instance. @@ -1249,7 +1250,7 @@ def apply_parameters( node_ids: Optional[MutableSequence[str]] = None, apply_all: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""``ApplyParameters`` restarts the set of specified nodes in order @@ -1393,7 +1394,7 @@ def apply_software_update( node_ids: Optional[MutableSequence[str]] = None, apply_all: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Updates software on the selected nodes of the @@ -1541,7 +1542,7 @@ def reschedule_maintenance( ] = None, schedule_time: Optional[timestamp_pb2.Timestamp] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Performs the apply phase of the RescheduleMaintenance @@ -1693,7 +1694,7 @@ def list_operations( request: Optional[operations_pb2.ListOperationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. 
@@ -1747,7 +1748,7 @@ def get_operation( request: Optional[operations_pb2.GetOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1801,7 +1802,7 @@ def delete_operation( request: Optional[operations_pb2.DeleteOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a long-running operation. @@ -1856,7 +1857,7 @@ def cancel_operation( request: Optional[operations_pb2.CancelOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -1910,7 +1911,7 @@ def get_location( request: Optional[locations_pb2.GetLocationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.Location: r"""Gets information about a location. @@ -1964,7 +1965,7 @@ def list_locations( request: Optional[locations_pb2.ListLocationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. 
@@ -2014,14 +2015,9 @@ def list_locations( return response -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-memcache", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("CloudMemcacheClient",) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py index 67a6078..828089f 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py @@ -25,18 +25,13 @@ from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore -import pkg_resources +from google.cloud.memcache_v1beta2 import gapic_version as package_version from google.cloud.memcache_v1beta2.types import cloud_memcache -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-memcache", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class CloudMemcacheTransport(abc.ABC): diff --git a/release-please-config.json b/release-please-config.json index 1c8d7b9..e7faaa4 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -4,6 +4,8 @@ ".": { "release-type": "python", "extra-files": [ + "google/cloud/memcache_v1beta2/gapic_version.py", + "google/cloud/memcache_v1/gapic_version.py", "google/cloud/memcache/gapic_version.py", { "type": "json", diff --git 
a/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_async.py index 0bfffad..4d7ed2e 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_async.py @@ -48,7 +48,7 @@ async def sample_apply_parameters(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_async.py index 58b8f08..3e8e97d 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_async.py @@ -56,7 +56,7 @@ async def sample_create_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_async.py index b6a4f6c..c74323e 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_async.py @@ -48,7 +48,7 @@ async def sample_delete_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_reschedule_maintenance_async.py 
b/samples/generated_samples/memcache_v1_generated_cloud_memcache_reschedule_maintenance_async.py index 1ebd88f..11b8744 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_reschedule_maintenance_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_reschedule_maintenance_async.py @@ -49,7 +49,7 @@ async def sample_reschedule_maintenance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_async.py index 7db284c..257fc15 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_async.py @@ -54,7 +54,7 @@ async def sample_update_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_async.py index 231298f..94949ec 100644 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_async.py +++ b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_async.py @@ -48,7 +48,7 @@ async def sample_update_parameters(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_async.py 
b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_async.py index 3504b1b..e2f69e7 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_async.py @@ -48,7 +48,7 @@ async def sample_apply_parameters(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_async.py index 0dc515a..23486a7 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_async.py @@ -48,7 +48,7 @@ async def sample_apply_software_update(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_async.py index ca5e8de..bba1842 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_async.py @@ -56,7 +56,7 @@ async def sample_create_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_async.py 
b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_async.py index 3ee0113..90e9057 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_async.py @@ -48,7 +48,7 @@ async def sample_delete_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_async.py index 867988e..c700a67 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_async.py @@ -49,7 +49,7 @@ async def sample_reschedule_maintenance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_async.py index 80ce316..f401e3a 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_async.py @@ -54,7 +54,7 @@ async def sample_update_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_async.py 
b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_async.py index 3e50251..4606935 100644 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_async.py +++ b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_async.py @@ -48,7 +48,7 @@ async def sample_update_parameters(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/setup.py b/setup.py index cad6f1c..a594894 100644 --- a/setup.py +++ b/setup.py @@ -36,7 +36,7 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.33.2, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "proto-plus >= 1.22.0, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index 6f3158c..6c44adf 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -4,6 +4,6 @@ # Pin the version to the lower bound. 
# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.33.2 +google-api-core==1.34.0 proto-plus==1.22.0 protobuf==3.19.5 From 7b26c8abf0d275fed01a107624beffd28097d6d3 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 8 Dec 2022 19:02:39 +0000 Subject: [PATCH 139/159] build(deps): bump certifi from 2022.9.24 to 2022.12.7 [autoapprove] (#233) Source-Link: https://togithub.com/googleapis/synthtool/commit/b4fe62efb5114b6738ad4b13d6f654f2bf4b7cc0 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:3bf87e47c2173d7eed42714589dc4da2c07c3268610f1e47f8e1a30decbfc7f1 --- .github/.OwlBot.lock.yaml | 2 +- .kokoro/requirements.txt | 6 +++--- .pre-commit-config.yaml | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 3f1ccc0..fccaa8e 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:e6cbd61f1838d9ff6a31436dfc13717f372a7482a82fc1863ca954ec47bff8c8 + digest: sha256:3bf87e47c2173d7eed42714589dc4da2c07c3268610f1e47f8e1a30decbfc7f1 diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 9c1b9be..05dc467 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.2.0 \ --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db # via google-auth -certifi==2022.9.24 \ - --hash=sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14 \ - --hash=sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382 +certifi==2022.12.7 \ + --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ + --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 # via requests cffi==1.15.1 \ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 46d2371..5405cc8 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -25,7 +25,7 @@ repos: rev: 22.3.0 hooks: - id: black -- repo: https://gitlab.com/pycqa/flake8 +- repo: https://github.com/pycqa/flake8 rev: 3.9.2 hooks: - id: flake8 From 9591161769067e636a8f13e3c64041face639d27 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 14 Dec 2022 16:02:53 -0500 Subject: [PATCH 140/159] chore(main): release 1.5.0 (#230) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- CHANGELOG.md | 23 +++++++++++++++++++ google/cloud/memcache/gapic_version.py | 2 +- google/cloud/memcache_v1/gapic_version.py | 2 +- .../cloud/memcache_v1beta2/gapic_version.py | 2 +- 
...pet_metadata_google.cloud.memcache.v1.json | 2 +- ...etadata_google.cloud.memcache.v1beta2.json | 2 +- 7 files changed, 29 insertions(+), 6 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 50f0c45..dd8fde7 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "1.4.4" + ".": "1.5.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 68a6eb4..eb98345 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,28 @@ # Changelog +## [1.5.0](https://github.com/googleapis/python-memcache/compare/v1.4.4...v1.5.0) (2022-12-14) + + +### Features + +* Add support for `google.cloud.memcache.__version__` ([c9c771a](https://github.com/googleapis/python-memcache/commit/c9c771af7c188c8c3ce66113b41a475d290aa6c2)) +* Add typing to proto.Message based class attributes ([c9c771a](https://github.com/googleapis/python-memcache/commit/c9c771af7c188c8c3ce66113b41a475d290aa6c2)) +* Maintenance schedules ([c9c771a](https://github.com/googleapis/python-memcache/commit/c9c771af7c188c8c3ce66113b41a475d290aa6c2)) + + +### Bug Fixes + +* Add dict typing for client_options ([c9c771a](https://github.com/googleapis/python-memcache/commit/c9c771af7c188c8c3ce66113b41a475d290aa6c2)) +* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([b1f7a36](https://github.com/googleapis/python-memcache/commit/b1f7a36fa9649dcd345220f692c29f676d858cdc)) +* Drop usage of pkg_resources ([b1f7a36](https://github.com/googleapis/python-memcache/commit/b1f7a36fa9649dcd345220f692c29f676d858cdc)) +* Fix timeout default values ([b1f7a36](https://github.com/googleapis/python-memcache/commit/b1f7a36fa9649dcd345220f692c29f676d858cdc)) + + +### Documentation + +* **samples:** Snippetgen handling of repeated enum field ([c9c771a](https://github.com/googleapis/python-memcache/commit/c9c771af7c188c8c3ce66113b41a475d290aa6c2)) +* **samples:** Snippetgen should call await on the operation coroutine before calling result 
([b1f7a36](https://github.com/googleapis/python-memcache/commit/b1f7a36fa9649dcd345220f692c29f676d858cdc)) + ## [1.4.4](https://github.com/googleapis/python-memcache/compare/v1.4.3...v1.4.4) (2022-10-07) diff --git a/google/cloud/memcache/gapic_version.py b/google/cloud/memcache/gapic_version.py index 25e4dd6..997edc1 100644 --- a/google/cloud/memcache/gapic_version.py +++ b/google/cloud/memcache/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.4.4" # {x-release-please-version} +__version__ = "1.5.0" # {x-release-please-version} diff --git a/google/cloud/memcache_v1/gapic_version.py b/google/cloud/memcache_v1/gapic_version.py index 25e4dd6..997edc1 100644 --- a/google/cloud/memcache_v1/gapic_version.py +++ b/google/cloud/memcache_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.4.4" # {x-release-please-version} +__version__ = "1.5.0" # {x-release-please-version} diff --git a/google/cloud/memcache_v1beta2/gapic_version.py b/google/cloud/memcache_v1beta2/gapic_version.py index 25e4dd6..997edc1 100644 --- a/google/cloud/memcache_v1beta2/gapic_version.py +++ b/google/cloud/memcache_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.4.4" # {x-release-please-version} +__version__ = "1.5.0" # {x-release-please-version} diff --git a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json index 4fbe6f0..b55122b 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-memcache", - "version": "0.1.0" + "version": "1.5.0" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json index a19a59a..7d1518d 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-memcache", - "version": "0.1.0" + "version": "1.5.0" }, "snippets": [ { From 0200065f58f8dbc6011c75131c49064bfa6f24ba Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 6 Jan 2023 11:46:17 +0000 Subject: [PATCH 141/159] chore(python): add support for python 3.11 [autoapprove] (#235) Source-Link: https://togithub.com/googleapis/synthtool/commit/7197a001ffb6d8ce7b0b9b11c280f0c536c1033a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:c43f1d918bcf817d337aa29ff833439494a158a0831508fda4ec75dc4c0d0320 --- .github/.OwlBot.lock.yaml | 4 +- .github/workflows/unittest.yml | 2 +- .kokoro/samples/python3.11/common.cfg | 40 ++++++++++++++++++++ .kokoro/samples/python3.11/continuous.cfg | 6 +++ .kokoro/samples/python3.11/periodic-head.cfg | 11 ++++++ .kokoro/samples/python3.11/periodic.cfg | 6 +++ .kokoro/samples/python3.11/presubmit.cfg | 6 +++ CONTRIBUTING.rst | 6 ++- noxfile.py | 
2 +- 9 files changed, 77 insertions(+), 6 deletions(-) create mode 100644 .kokoro/samples/python3.11/common.cfg create mode 100644 .kokoro/samples/python3.11/continuous.cfg create mode 100644 .kokoro/samples/python3.11/periodic-head.cfg create mode 100644 .kokoro/samples/python3.11/periodic.cfg create mode 100644 .kokoro/samples/python3.11/presubmit.cfg diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index fccaa8e..889f77d 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,4 +1,4 @@ -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:3bf87e47c2173d7eed42714589dc4da2c07c3268610f1e47f8e1a30decbfc7f1 + digest: sha256:c43f1d918bcf817d337aa29ff833439494a158a0831508fda4ec75dc4c0d0320 diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index 23000c0..8057a76 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ['3.7', '3.8', '3.9', '3.10'] + python: ['3.7', '3.8', '3.9', '3.10', '3.11'] steps: - name: Checkout uses: actions/checkout@v3 diff --git a/.kokoro/samples/python3.11/common.cfg b/.kokoro/samples/python3.11/common.cfg new file mode 100644 index 0000000..b7f70aa --- /dev/null +++ b/.kokoro/samples/python3.11/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.11" +} + +# Declare build specific Cloud project. 
+env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-311" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-memcache/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-memcache/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.11/continuous.cfg b/.kokoro/samples/python3.11/continuous.cfg new file mode 100644 index 0000000..a1c8d97 --- /dev/null +++ b/.kokoro/samples/python3.11/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.11/periodic-head.cfg b/.kokoro/samples/python3.11/periodic-head.cfg new file mode 100644 index 0000000..aa527a5 --- /dev/null +++ b/.kokoro/samples/python3.11/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-memcache/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.11/periodic.cfg b/.kokoro/samples/python3.11/periodic.cfg new file mode 100644 index 0000000..71cd1e5 --- /dev/null +++ b/.kokoro/samples/python3.11/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/.kokoro/samples/python3.11/presubmit.cfg 
b/.kokoro/samples/python3.11/presubmit.cfg new file mode 100644 index 0000000..a1c8d97 --- /dev/null +++ b/.kokoro/samples/python3.11/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 8d6353b..91abec0 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.7, 3.8, 3.9 and 3.10 on both UNIX and Windows. + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests. - To run a single unit test:: - $ nox -s unit-3.10 -- -k + $ nox -s unit-3.11 -- -k .. note:: @@ -225,11 +225,13 @@ We support: - `Python 3.8`_ - `Python 3.9`_ - `Python 3.10`_ +- `Python 3.11`_ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ .. _Python 3.9: https://docs.python.org/3.9/ .. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ Supported versions can be found in our ``noxfile.py`` `config`_. 
diff --git a/noxfile.py b/noxfile.py index d8440c0..e716318 100644 --- a/noxfile.py +++ b/noxfile.py @@ -32,7 +32,7 @@ DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", From 36b98c5e33ea1f707d45d1e0d4cf91032d789a6e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 9 Jan 2023 22:52:54 -0500 Subject: [PATCH 142/159] feat: Add support for python 3.11 (#236) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Add support for python 3.11 chore: Update gapic-generator-python to v1.8.0 PiperOrigin-RevId: 500768693 Source-Link: https://github.com/googleapis/googleapis/commit/190b612e3d0ff8f025875a669e5d68a1446d43c1 Source-Link: https://github.com/googleapis/googleapis-gen/commit/7bf29a414b9ecac3170f0b65bdc2a95705c0ef1a Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiN2JmMjlhNDE0YjllY2FjMzE3MGYwYjY1YmRjMmE5NTcwNWMwZWYxYSJ9 * 🩉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../memcache_v1/services/cloud_memcache/async_client.py | 2 +- google/cloud/memcache_v1/services/cloud_memcache/client.py | 2 +- .../services/cloud_memcache/async_client.py | 2 +- .../memcache_v1beta2/services/cloud_memcache/client.py | 2 +- .../snippet_metadata_google.cloud.memcache.v1.json | 2 +- .../snippet_metadata_google.cloud.memcache.v1beta2.json | 2 +- setup.py | 2 ++ testing/constraints-3.12.txt | 6 ++++++ 8 files changed, 14 insertions(+), 6 deletions(-) create mode 100644 testing/constraints-3.12.txt diff --git a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py index 819dc9a..4be06f5 100644 --- 
a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py @@ -161,7 +161,7 @@ def get_mtls_endpoint_and_cert_source( The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py index d00e4f2..7dd9eef 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -315,7 +315,7 @@ def get_mtls_endpoint_and_cert_source( The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. 
diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py index f3f032f..37f3efe 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py @@ -161,7 +161,7 @@ def get_mtls_endpoint_and_cert_source( The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index 4bea8ce..2ecc7ce 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -315,7 +315,7 @@ def get_mtls_endpoint_and_cert_source( The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. 
diff --git a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json index b55122b..4fbe6f0 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-memcache", - "version": "1.5.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json index 7d1518d..a19a59a 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-memcache", - "version": "1.5.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/setup.py b/setup.py index a594894..6a85eb8 100644 --- a/setup.py +++ b/setup.py @@ -38,6 +38,7 @@ dependencies = [ "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] url = "https://github.com/googleapis/python-memcache" @@ -77,6 +78,7 @@ "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/testing/constraints-3.12.txt b/testing/constraints-3.12.txt new file mode 100644 index 0000000..ed7f9ae --- /dev/null +++ b/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required 
for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf From adf5bd2d93345930abe3ab8d57863a9e83bb897e Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 10 Jan 2023 14:03:25 -0500 Subject: [PATCH 143/159] chore(main): release 1.6.0 (#237) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- CHANGELOG.md | 7 +++++++ google/cloud/memcache/gapic_version.py | 2 +- google/cloud/memcache_v1/gapic_version.py | 2 +- google/cloud/memcache_v1beta2/gapic_version.py | 2 +- .../snippet_metadata_google.cloud.memcache.v1.json | 2 +- .../snippet_metadata_google.cloud.memcache.v1beta2.json | 2 +- 7 files changed, 13 insertions(+), 6 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index dd8fde7..0d1bebe 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "1.5.0" + ".": "1.6.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index eb98345..72d69fb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.6.0](https://github.com/googleapis/python-memcache/compare/v1.5.0...v1.6.0) (2023-01-10) + + +### Features + +* Add support for python 3.11 ([#236](https://github.com/googleapis/python-memcache/issues/236)) ([36b98c5](https://github.com/googleapis/python-memcache/commit/36b98c5e33ea1f707d45d1e0d4cf91032d789a6e)) + ## [1.5.0](https://github.com/googleapis/python-memcache/compare/v1.4.4...v1.5.0) (2022-12-14) diff --git a/google/cloud/memcache/gapic_version.py b/google/cloud/memcache/gapic_version.py index 997edc1..a016bdf 100644 --- a/google/cloud/memcache/gapic_version.py +++ b/google/cloud/memcache/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.5.0" # {x-release-please-version} +__version__ = "1.6.0" # {x-release-please-version} diff --git a/google/cloud/memcache_v1/gapic_version.py b/google/cloud/memcache_v1/gapic_version.py index 997edc1..a016bdf 100644 --- a/google/cloud/memcache_v1/gapic_version.py +++ b/google/cloud/memcache_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.5.0" # {x-release-please-version} +__version__ = "1.6.0" # {x-release-please-version} diff --git a/google/cloud/memcache_v1beta2/gapic_version.py b/google/cloud/memcache_v1beta2/gapic_version.py index 997edc1..a016bdf 100644 --- a/google/cloud/memcache_v1beta2/gapic_version.py +++ b/google/cloud/memcache_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.5.0" # {x-release-please-version} +__version__ = "1.6.0" # {x-release-please-version} diff --git a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json index 4fbe6f0..136b2f8 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-memcache", - "version": "0.1.0" + "version": "1.6.0" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json index a19a59a..95573dd 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-memcache", - "version": "0.1.0" + "version": "1.6.0" 
}, "snippets": [ { From 29246d4f4dc201a3faab34b3cb16f8629289be82 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 20 Jan 2023 16:04:44 -0500 Subject: [PATCH 144/159] docs: Add documentation for enums (#238) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: Add documentation for enums fix: Add context manager return types chore: Update gapic-generator-python to v1.8.1 PiperOrigin-RevId: 503210727 Source-Link: https://github.com/googleapis/googleapis/commit/a391fd1dac18dfdfa00c18c8404f2c3a6ff8e98e Source-Link: https://github.com/googleapis/googleapis-gen/commit/0080f830dec37c3384157082bce279e37079ea58 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDA4MGY4MzBkZWMzN2MzMzg0MTU3MDgyYmNlMjc5ZTM3MDc5ZWE1OCJ9 * 🩉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../services/cloud_memcache/client.py | 2 +- .../cloud/memcache_v1/types/cloud_memcache.py | 70 +++++++++++++++++-- .../services/cloud_memcache/client.py | 2 +- .../memcache_v1beta2/types/cloud_memcache.py | 70 +++++++++++++++++-- ...pet_metadata_google.cloud.memcache.v1.json | 2 +- ...etadata_google.cloud.memcache.v1beta2.json | 2 +- 6 files changed, 134 insertions(+), 14 deletions(-) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py index 7dd9eef..7c3bfe9 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -1529,7 +1529,7 @@ def sample_reschedule_maintenance(): # Done; return the response. 
return response - def __enter__(self): + def __enter__(self) -> "CloudMemcacheClient": return self def __exit__(self, type, value, traceback): diff --git a/google/cloud/memcache_v1/types/cloud_memcache.py b/google/cloud/memcache_v1/types/cloud_memcache.py index bc1947e..8c552e9 100644 --- a/google/cloud/memcache_v1/types/cloud_memcache.py +++ b/google/cloud/memcache_v1/types/cloud_memcache.py @@ -48,7 +48,14 @@ class MemcacheVersion(proto.Enum): - r"""Memcached versions supported by our service.""" + r"""Memcached versions supported by our service. + + Values: + MEMCACHE_VERSION_UNSPECIFIED (0): + + MEMCACHE_1_5 (1): + Memcached 1.5 version. + """ MEMCACHE_VERSION_UNSPECIFIED = 0 MEMCACHE_1_5 = 1 @@ -138,7 +145,25 @@ class Instance(proto.Message): """ class State(proto.Enum): - r"""Different states of a Memcached instance.""" + r"""Different states of a Memcached instance. + + Values: + STATE_UNSPECIFIED (0): + State not set. + CREATING (1): + Memcached instance is being created. + READY (2): + Memcached instance has been created and ready + to be used. + UPDATING (3): + Memcached instance is updating configuration + such as maintenance policy and schedule. + DELETING (4): + Memcached instance is being deleted. + PERFORMING_MAINTENANCE (5): + Memcached instance is going through + maintenance, e.g. data plane rollout. + """ STATE_UNSPECIFIED = 0 CREATING = 1 READY = 2 @@ -193,7 +218,20 @@ class Node(proto.Message): """ class State(proto.Enum): - r"""Different states of a Memcached node.""" + r"""Different states of a Memcached node. + + Values: + STATE_UNSPECIFIED (0): + Node state is not set. + CREATING (1): + Node is being created. + READY (2): + Node has been created and ready to be used. + DELETING (3): + Node is being deleted. + UPDATING (4): + Node is being updated. 
+ """ STATE_UNSPECIFIED = 0 CREATING = 1 READY = 2 @@ -240,7 +278,14 @@ class InstanceMessage(proto.Message): """ class Code(proto.Enum): - r"""""" + r""" + + Values: + CODE_UNSPECIFIED (0): + Message Code not set. + ZONE_DISTRIBUTION_UNBALANCED (1): + Memcached nodes are distributed unevenly. + """ CODE_UNSPECIFIED = 0 ZONE_DISTRIBUTION_UNBALANCED = 1 @@ -465,7 +510,22 @@ class RescheduleMaintenanceRequest(proto.Message): """ class RescheduleType(proto.Enum): - r"""Reschedule options.""" + r"""Reschedule options. + + Values: + RESCHEDULE_TYPE_UNSPECIFIED (0): + Not set. + IMMEDIATE (1): + If the user wants to schedule the maintenance + to happen now. + NEXT_AVAILABLE_WINDOW (2): + If the user wants to use the existing + maintenance policy to find the next available + window. + SPECIFIC_TIME (3): + If the user wants to reschedule the + maintenance to a specific time. + """ RESCHEDULE_TYPE_UNSPECIFIED = 0 IMMEDIATE = 1 NEXT_AVAILABLE_WINDOW = 2 diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index 2ecc7ce..865ea44 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -1676,7 +1676,7 @@ def sample_reschedule_maintenance(): # Done; return the response. return response - def __enter__(self): + def __enter__(self) -> "CloudMemcacheClient": return self def __exit__(self, type, value, traceback): diff --git a/google/cloud/memcache_v1beta2/types/cloud_memcache.py b/google/cloud/memcache_v1beta2/types/cloud_memcache.py index 6cd3f19..2809a23 100644 --- a/google/cloud/memcache_v1beta2/types/cloud_memcache.py +++ b/google/cloud/memcache_v1beta2/types/cloud_memcache.py @@ -49,7 +49,14 @@ class MemcacheVersion(proto.Enum): - r"""Memcached versions supported by our service.""" + r"""Memcached versions supported by our service. 
+ + Values: + MEMCACHE_VERSION_UNSPECIFIED (0): + + MEMCACHE_1_5 (1): + Memcached 1.5 version. + """ MEMCACHE_VERSION_UNSPECIFIED = 0 MEMCACHE_1_5 = 1 @@ -143,7 +150,25 @@ class Instance(proto.Message): """ class State(proto.Enum): - r"""Different states of a Memcached instance.""" + r"""Different states of a Memcached instance. + + Values: + STATE_UNSPECIFIED (0): + State not set. + CREATING (1): + Memcached instance is being created. + READY (2): + Memcached instance has been created and ready + to be used. + UPDATING (3): + Memcached instance is updating configuration + such as maintenance policy and schedule. + DELETING (4): + Memcached instance is being deleted. + PERFORMING_MAINTENANCE (5): + Memcached instance is going through + maintenance, e.g. data plane rollout. + """ STATE_UNSPECIFIED = 0 CREATING = 1 READY = 2 @@ -201,7 +226,20 @@ class Node(proto.Message): """ class State(proto.Enum): - r"""Different states of a Memcached node.""" + r"""Different states of a Memcached node. + + Values: + STATE_UNSPECIFIED (0): + Node state is not set. + CREATING (1): + Node is being created. + READY (2): + Node has been created and ready to be used. + DELETING (3): + Node is being deleted. + UPDATING (4): + Node is being updated. + """ STATE_UNSPECIFIED = 0 CREATING = 1 READY = 2 @@ -252,7 +290,14 @@ class InstanceMessage(proto.Message): """ class Code(proto.Enum): - r"""""" + r""" + + Values: + CODE_UNSPECIFIED (0): + Message Code not set. + ZONE_DISTRIBUTION_UNBALANCED (1): + Memcached nodes are distributed unevenly. + """ CODE_UNSPECIFIED = 0 ZONE_DISTRIBUTION_UNBALANCED = 1 @@ -673,7 +718,22 @@ class RescheduleMaintenanceRequest(proto.Message): """ class RescheduleType(proto.Enum): - r"""Reschedule options.""" + r"""Reschedule options. + + Values: + RESCHEDULE_TYPE_UNSPECIFIED (0): + Not set. + IMMEDIATE (1): + If the user wants to schedule the maintenance + to happen now. 
+ NEXT_AVAILABLE_WINDOW (2): + If the user wants to use the existing + maintenance policy to find the next available + window. + SPECIFIC_TIME (3): + If the user wants to reschedule the + maintenance to a specific time. + """ RESCHEDULE_TYPE_UNSPECIFIED = 0 IMMEDIATE = 1 NEXT_AVAILABLE_WINDOW = 2 diff --git a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json index 136b2f8..4fbe6f0 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-memcache", - "version": "1.6.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json index 95573dd..a19a59a 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-memcache", - "version": "1.6.0" + "version": "0.1.0" }, "snippets": [ { From 45f7d75841e7640f2b3378131b09c07b4f861adf Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 23 Jan 2023 10:38:07 -0500 Subject: [PATCH 145/159] chore(main): release 1.6.1 (#239) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- CHANGELOG.md | 12 ++++++++++++ google/cloud/memcache/gapic_version.py | 2 +- google/cloud/memcache_v1/gapic_version.py | 2 +- google/cloud/memcache_v1beta2/gapic_version.py | 2 +- .../snippet_metadata_google.cloud.memcache.v1.json | 2 +- ...ippet_metadata_google.cloud.memcache.v1beta2.json | 2 +- 7 files changed, 18 
insertions(+), 6 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 0d1bebe..093be7e 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "1.6.0" + ".": "1.6.1" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 72d69fb..e5d22cd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,17 @@ # Changelog +## [1.6.1](https://github.com/googleapis/python-memcache/compare/v1.6.0...v1.6.1) (2023-01-20) + + +### Bug Fixes + +* Add context manager return types ([29246d4](https://github.com/googleapis/python-memcache/commit/29246d4f4dc201a3faab34b3cb16f8629289be82)) + + +### Documentation + +* Add documentation for enums ([29246d4](https://github.com/googleapis/python-memcache/commit/29246d4f4dc201a3faab34b3cb16f8629289be82)) + ## [1.6.0](https://github.com/googleapis/python-memcache/compare/v1.5.0...v1.6.0) (2023-01-10) diff --git a/google/cloud/memcache/gapic_version.py b/google/cloud/memcache/gapic_version.py index a016bdf..b4028ab 100644 --- a/google/cloud/memcache/gapic_version.py +++ b/google/cloud/memcache/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.6.0" # {x-release-please-version} +__version__ = "1.6.1" # {x-release-please-version} diff --git a/google/cloud/memcache_v1/gapic_version.py b/google/cloud/memcache_v1/gapic_version.py index a016bdf..b4028ab 100644 --- a/google/cloud/memcache_v1/gapic_version.py +++ b/google/cloud/memcache_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.6.0" # {x-release-please-version} +__version__ = "1.6.1" # {x-release-please-version} diff --git a/google/cloud/memcache_v1beta2/gapic_version.py b/google/cloud/memcache_v1beta2/gapic_version.py index a016bdf..b4028ab 100644 --- a/google/cloud/memcache_v1beta2/gapic_version.py +++ b/google/cloud/memcache_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.6.0" # {x-release-please-version} +__version__ = "1.6.1" # {x-release-please-version} diff --git a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json index 4fbe6f0..ee47435 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-memcache", - "version": "0.1.0" + "version": "1.6.1" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json index a19a59a..56253ca 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-memcache", - "version": "0.1.0" + "version": "1.6.1" }, "snippets": [ { From a77cb9225f26356ae38790277f650974f913a591 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 25 Jan 2023 11:33:40 -0500 Subject: [PATCH 146/159] chore: Update gapic-generator-python to v1.8.2 (#240) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.8.2 PiperOrigin-RevId: 504289125 
Source-Link: https://github.com/googleapis/googleapis/commit/38a48a44a44279e9cf9f2f864b588958a2d87491 Source-Link: https://github.com/googleapis/googleapis-gen/commit/b2dc22663dbe47a972c8d8c2f8a4df013dafdcbc Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjJkYzIyNjYzZGJlNDdhOTcyYzhkOGMyZjhhNGRmMDEzZGFmZGNiYyJ9 * 🩉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .coveragerc | 1 + google/cloud/memcache_v1/__init__.py | 2 +- google/cloud/memcache_v1/types/cloud_memcache.py | 2 +- google/cloud/memcache_v1beta2/__init__.py | 2 +- google/cloud/memcache_v1beta2/types/cloud_memcache.py | 2 +- .../snippet_metadata_google.cloud.memcache.v1.json | 2 +- .../snippet_metadata_google.cloud.memcache.v1beta2.json | 2 +- 7 files changed, 7 insertions(+), 6 deletions(-) diff --git a/.coveragerc b/.coveragerc index c090f54..b16a38f 100644 --- a/.coveragerc +++ b/.coveragerc @@ -5,6 +5,7 @@ branch = True show_missing = True omit = google/cloud/memcache/__init__.py + google/cloud/memcache/gapic_version.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER diff --git a/google/cloud/memcache_v1/__init__.py b/google/cloud/memcache_v1/__init__.py index 02ebf81..cbaf61d 100644 --- a/google/cloud/memcache_v1/__init__.py +++ b/google/cloud/memcache_v1/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.cloud.memcache import gapic_version as package_version +from google.cloud.memcache_v1 import gapic_version as package_version __version__ = package_version.__version__ diff --git a/google/cloud/memcache_v1/types/cloud_memcache.py b/google/cloud/memcache_v1/types/cloud_memcache.py index 8c552e9..cc2fd53 100644 --- a/google/cloud/memcache_v1/types/cloud_memcache.py +++ b/google/cloud/memcache_v1/types/cloud_memcache.py @@ -52,7 +52,7 @@ class MemcacheVersion(proto.Enum): Values: MEMCACHE_VERSION_UNSPECIFIED (0): - + No description available. MEMCACHE_1_5 (1): Memcached 1.5 version. """ diff --git a/google/cloud/memcache_v1beta2/__init__.py b/google/cloud/memcache_v1beta2/__init__.py index a655516..1e8afba 100644 --- a/google/cloud/memcache_v1beta2/__init__.py +++ b/google/cloud/memcache_v1beta2/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from google.cloud.memcache import gapic_version as package_version +from google.cloud.memcache_v1beta2 import gapic_version as package_version __version__ = package_version.__version__ diff --git a/google/cloud/memcache_v1beta2/types/cloud_memcache.py b/google/cloud/memcache_v1beta2/types/cloud_memcache.py index 2809a23..13cb54e 100644 --- a/google/cloud/memcache_v1beta2/types/cloud_memcache.py +++ b/google/cloud/memcache_v1beta2/types/cloud_memcache.py @@ -53,7 +53,7 @@ class MemcacheVersion(proto.Enum): Values: MEMCACHE_VERSION_UNSPECIFIED (0): - + No description available. MEMCACHE_1_5 (1): Memcached 1.5 version. 
""" diff --git a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json index ee47435..4fbe6f0 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-memcache", - "version": "1.6.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json index 56253ca..a19a59a 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-memcache", - "version": "1.6.1" + "version": "0.1.0" }, "snippets": [ { From c7af7cde787fdb40bafc5bb07f6fb3f9fabfd529 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 30 Jan 2023 16:50:19 +0000 Subject: [PATCH 147/159] chore: fix prerelease_deps nox session [autoapprove] (#241) Source-Link: https://togithub.com/googleapis/synthtool/commit/26c7505b2f76981ec1707b851e1595c8c06e90fc Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f946c75373c2b0040e8e318c5e85d0cf46bc6e61d0a01f3ef94d8de974ac6790 --- .github/.OwlBot.lock.yaml | 2 +- noxfile.py | 14 ++++++-------- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 889f77d..f0f3b24 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:c43f1d918bcf817d337aa29ff833439494a158a0831508fda4ec75dc4c0d0320 + digest: sha256:f946c75373c2b0040e8e318c5e85d0cf46bc6e61d0a01f3ef94d8de974ac6790 diff --git a/noxfile.py b/noxfile.py index e716318..95e58c5 100644 --- a/noxfile.py +++ b/noxfile.py @@ -189,9 +189,9 @@ def unit(session): def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. - # Exclude version 1.49.0rc1 which has a known issue. - # See https://github.com/grpc/grpc/pull/30642 - session.install("--pre", "grpcio!=1.49.0rc1") + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) @@ -346,9 +346,7 @@ def prerelease_deps(session): unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES session.install(*unit_deps_all) system_deps_all = ( - SYSTEM_TEST_STANDARD_DEPENDENCIES - + SYSTEM_TEST_EXTERNAL_DEPENDENCIES - + SYSTEM_TEST_EXTRAS + SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES ) session.install(*system_deps_all) @@ -378,8 +376,8 @@ def prerelease_deps(session): # dependency of grpc "six", "googleapis-common-protos", - # Exclude version 1.49.0rc1 which has a known issue. See https://github.com/grpc/grpc/pull/30642 - "grpcio!=1.49.0rc1", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", "grpcio-status", "google-api-core", "proto-plus", From 310f65dc6ea63dc70aceaafe1b6af670200496c0 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 8 Feb 2023 07:08:54 -0500 Subject: [PATCH 148/159] chore: Update gapic-generator-python to v1.8.4 (#242) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.8.4 PiperOrigin-RevId: 507808936 Source-Link: https://github.com/googleapis/googleapis/commit/64cf8492b21778ce62c66ecee81b468a293bfd4c Source-Link: https://github.com/googleapis/googleapis-gen/commit/53c48cac153d3b37f3d2c2dec4830cfd91ec4153 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTNjNDhjYWMxNTNkM2IzN2YzZDJjMmRlYzQ4MzBjZmQ5MWVjNDE1MyJ9 * 🩉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- setup.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/setup.py b/setup.py index 6a85eb8..a8ab221 100644 --- a/setup.py +++ b/setup.py @@ -55,9 +55,7 @@ if package.startswith("google") ] -namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") +namespaces = ["google", "google.cloud"] setuptools.setup( name=name, From 87bf82908960bc0c156d9b69322813d75ca0a629 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 8 Feb 2023 15:14:37 +0000 Subject: [PATCH 149/159] build(deps): bump cryptography from 38.0.3 to 39.0.1 in /synthtool/gcp/templates/python_library/.kokoro (#244) Source-Link: https://togithub.com/googleapis/synthtool/commit/bb171351c3946d3c3c32e60f5f18cee8c464ec51 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f62c53736eccb0c4934a3ea9316e0d57696bb49c1a7c86c726e9bb8a2f87dadf --- 
.github/.OwlBot.lock.yaml | 2 +- .kokoro/requirements.txt | 49 ++++++++++++++++++--------------------- 2 files changed, 23 insertions(+), 28 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index f0f3b24..894fb6b 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f946c75373c2b0040e8e318c5e85d0cf46bc6e61d0a01f3ef94d8de974ac6790 + digest: sha256:f62c53736eccb0c4934a3ea9316e0d57696bb49c1a7c86c726e9bb8a2f87dadf diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 05dc467..096e480 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -113,33 +113,28 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==38.0.3 \ - --hash=sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d \ - --hash=sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd \ - --hash=sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146 \ - --hash=sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7 \ - --hash=sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436 \ - --hash=sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0 \ - --hash=sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828 \ - --hash=sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b \ - --hash=sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55 \ - --hash=sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36 \ - --hash=sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50 \ - --hash=sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2 \ - 
--hash=sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a \ - --hash=sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8 \ - --hash=sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0 \ - --hash=sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548 \ - --hash=sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320 \ - --hash=sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748 \ - --hash=sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249 \ - --hash=sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959 \ - --hash=sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f \ - --hash=sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0 \ - --hash=sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd \ - --hash=sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220 \ - --hash=sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c \ - --hash=sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722 +cryptography==39.0.1 \ + --hash=sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4 \ + --hash=sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f \ + --hash=sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502 \ + --hash=sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41 \ + --hash=sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965 \ + --hash=sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e \ + --hash=sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc \ + --hash=sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad \ + --hash=sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505 \ + 
--hash=sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388 \ + --hash=sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6 \ + --hash=sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2 \ + --hash=sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac \ + --hash=sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695 \ + --hash=sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6 \ + --hash=sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336 \ + --hash=sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0 \ + --hash=sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c \ + --hash=sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106 \ + --hash=sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a \ + --hash=sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8 # via # gcp-releasetool # secretstorage From f702f7a08d7a31689e400969d23e4a1d8637dd41 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 16 Feb 2023 15:32:47 -0500 Subject: [PATCH 150/159] feat: enable "rest" transport in Python for services supporting numeric enums (#245) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: enable "rest" transport in Python for services supporting numeric enums PiperOrigin-RevId: 508143576 Source-Link: https://github.com/googleapis/googleapis/commit/7a702a989db3b413f39ff8994ca53fb38b6928c2 Source-Link: https://github.com/googleapis/googleapis-gen/commit/6ad1279c0e7aa787ac6b66c9fd4a210692edffcd Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNmFkMTI3OWMwZTdhYTc4N2FjNmI2NmM5ZmQ0YTIxMDY5MmVkZmZjZCJ9 * 🩉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- 
Co-authored-by: Owl Bot --- google/cloud/memcache_v1/gapic_metadata.json | 45 + .../services/cloud_memcache/client.py | 2 + .../cloud_memcache/transports/__init__.py | 4 + .../cloud_memcache/transports/rest.py | 1877 +++++++++ .../memcache_v1beta2/gapic_metadata.json | 50 + .../services/cloud_memcache/client.py | 2 + .../cloud_memcache/transports/__init__.py | 4 + .../cloud_memcache/transports/rest.py | 2018 +++++++++ .../gapic/memcache_v1/test_cloud_memcache.py | 3214 ++++++++++++++- .../memcache_v1beta2/test_cloud_memcache.py | 3639 ++++++++++++++++- 10 files changed, 10510 insertions(+), 345 deletions(-) create mode 100644 google/cloud/memcache_v1/services/cloud_memcache/transports/rest.py create mode 100644 google/cloud/memcache_v1beta2/services/cloud_memcache/transports/rest.py diff --git a/google/cloud/memcache_v1/gapic_metadata.json b/google/cloud/memcache_v1/gapic_metadata.json index 8c92ac7..2c6ccb4 100644 --- a/google/cloud/memcache_v1/gapic_metadata.json +++ b/google/cloud/memcache_v1/gapic_metadata.json @@ -96,6 +96,51 @@ ] } } + }, + "rest": { + "libraryClient": "CloudMemcacheClient", + "rpcs": { + "ApplyParameters": { + "methods": [ + "apply_parameters" + ] + }, + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "RescheduleMaintenance": { + "methods": [ + "reschedule_maintenance" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + }, + "UpdateParameters": { + "methods": [ + "update_parameters" + ] + } + } } } } diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py index 7c3bfe9..65dd566 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -60,6 +60,7 @@ from 
.transports.base import DEFAULT_CLIENT_INFO, CloudMemcacheTransport from .transports.grpc import CloudMemcacheGrpcTransport from .transports.grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport +from .transports.rest import CloudMemcacheRestTransport class CloudMemcacheClientMeta(type): @@ -73,6 +74,7 @@ class CloudMemcacheClientMeta(type): _transport_registry = OrderedDict() # type: Dict[str, Type[CloudMemcacheTransport]] _transport_registry["grpc"] = CloudMemcacheGrpcTransport _transport_registry["grpc_asyncio"] = CloudMemcacheGrpcAsyncIOTransport + _transport_registry["rest"] = CloudMemcacheRestTransport def get_transport_class( cls, diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py index ab7c86d..8e6a821 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py @@ -19,14 +19,18 @@ from .base import CloudMemcacheTransport from .grpc import CloudMemcacheGrpcTransport from .grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport +from .rest import CloudMemcacheRestInterceptor, CloudMemcacheRestTransport # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[CloudMemcacheTransport]] _transport_registry["grpc"] = CloudMemcacheGrpcTransport _transport_registry["grpc_asyncio"] = CloudMemcacheGrpcAsyncIOTransport +_transport_registry["rest"] = CloudMemcacheRestTransport __all__ = ( "CloudMemcacheTransport", "CloudMemcacheGrpcTransport", "CloudMemcacheGrpcAsyncIOTransport", + "CloudMemcacheRestTransport", + "CloudMemcacheRestInterceptor", ) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/rest.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/rest.py new file mode 100644 index 0000000..3456e6f --- /dev/null +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/rest.py @@ -0,0 +1,1877 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.memcache_v1.types import cloud_memcache + +from .base import CloudMemcacheTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class CloudMemcacheRestInterceptor: + """Interceptor for CloudMemcache. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the CloudMemcacheRestTransport. + + .. 
code-block:: python + class MyCustomCloudMemcacheInterceptor(CloudMemcacheRestInterceptor): + def pre_apply_parameters(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_apply_parameters(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_instances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_reschedule_maintenance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_reschedule_maintenance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_parameters(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_parameters(self, response): + logging.log(f"Received response: {response}") + 
return response + + transport = CloudMemcacheRestTransport(interceptor=MyCustomCloudMemcacheInterceptor()) + client = CloudMemcacheClient(transport=transport) + + + """ + + def pre_apply_parameters( + self, + request: cloud_memcache.ApplyParametersRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_memcache.ApplyParametersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for apply_parameters + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudMemcache server. + """ + return request, metadata + + def post_apply_parameters( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for apply_parameters + + Override in a subclass to manipulate the response + after it is returned by the CloudMemcache server but before + it is returned to user code. + """ + return response + + def pre_create_instance( + self, + request: cloud_memcache.CreateInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_memcache.CreateInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudMemcache server. + """ + return request, metadata + + def post_create_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_instance + + Override in a subclass to manipulate the response + after it is returned by the CloudMemcache server but before + it is returned to user code. + """ + return response + + def pre_delete_instance( + self, + request: cloud_memcache.DeleteInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_memcache.DeleteInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudMemcache server. 
+ """ + return request, metadata + + def post_delete_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_instance + + Override in a subclass to manipulate the response + after it is returned by the CloudMemcache server but before + it is returned to user code. + """ + return response + + def pre_get_instance( + self, + request: cloud_memcache.GetInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_memcache.GetInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudMemcache server. + """ + return request, metadata + + def post_get_instance( + self, response: cloud_memcache.Instance + ) -> cloud_memcache.Instance: + """Post-rpc interceptor for get_instance + + Override in a subclass to manipulate the response + after it is returned by the CloudMemcache server but before + it is returned to user code. + """ + return response + + def pre_list_instances( + self, + request: cloud_memcache.ListInstancesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_memcache.ListInstancesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudMemcache server. + """ + return request, metadata + + def post_list_instances( + self, response: cloud_memcache.ListInstancesResponse + ) -> cloud_memcache.ListInstancesResponse: + """Post-rpc interceptor for list_instances + + Override in a subclass to manipulate the response + after it is returned by the CloudMemcache server but before + it is returned to user code. 
+ """ + return response + + def pre_reschedule_maintenance( + self, + request: cloud_memcache.RescheduleMaintenanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_memcache.RescheduleMaintenanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for reschedule_maintenance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudMemcache server. + """ + return request, metadata + + def post_reschedule_maintenance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for reschedule_maintenance + + Override in a subclass to manipulate the response + after it is returned by the CloudMemcache server but before + it is returned to user code. + """ + return response + + def pre_update_instance( + self, + request: cloud_memcache.UpdateInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_memcache.UpdateInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudMemcache server. + """ + return request, metadata + + def post_update_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_instance + + Override in a subclass to manipulate the response + after it is returned by the CloudMemcache server but before + it is returned to user code. + """ + return response + + def pre_update_parameters( + self, + request: cloud_memcache.UpdateParametersRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_memcache.UpdateParametersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_parameters + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudMemcache server. 
+ """ + return request, metadata + + def post_update_parameters( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_parameters + + Override in a subclass to manipulate the response + after it is returned by the CloudMemcache server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> locations_pb2.Location: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudMemcache server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.GetLocationRequest + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the CloudMemcache server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> locations_pb2.ListLocationsResponse: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudMemcache server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsRequest + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the CloudMemcache server but before + it is returned to user code. 
+ """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> None: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudMemcache server. + """ + return request, metadata + + def post_cancel_operation( + self, response: operations_pb2.CancelOperationRequest + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudMemcache server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> None: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudMemcache server. + """ + return request, metadata + + def post_delete_operation( + self, response: operations_pb2.DeleteOperationRequest + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudMemcache server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> operations_pb2.Operation: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudMemcache server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.GetOperationRequest + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudMemcache server but before + it is returned to user code. 
+ """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> operations_pb2.ListOperationsResponse: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudMemcache server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsRequest + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the CloudMemcache server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class CloudMemcacheRestStub: + _session: AuthorizedSession + _host: str + _interceptor: CloudMemcacheRestInterceptor + + +class CloudMemcacheRestTransport(CloudMemcacheTransport): + """REST backend transport for CloudMemcache. + + Configures and manages Cloud Memorystore for Memcached instances. + + The ``memcache.googleapis.com`` service implements the Google Cloud + Memorystore for Memcached API and defines the following resource + model for managing Memorystore Memcached (also called Memcached + below) instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Memcached instances, named: + ``/instances/*`` + - As such, Memcached instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be a GCP ``region``; for example: + + - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached`` + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "memcache.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[CloudMemcacheRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or CloudMemcacheRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _ApplyParameters(CloudMemcacheRestStub): + def __hash__(self): + return hash("ApplyParameters") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_memcache.ApplyParametersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the apply parameters method over HTTP. + + Args: + request (~.cloud_memcache.ApplyParametersRequest): + The request object. Request for + [ApplyParameters][google.cloud.memcache.v1.CloudMemcache.ApplyParameters]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/instances/*}:applyParameters", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_apply_parameters( + request, metadata + ) + pb_request = cloud_memcache.ApplyParametersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_apply_parameters(resp) + return resp + + class _CreateInstance(CloudMemcacheRestStub): + def __hash__(self): + return hash("CreateInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + "instanceId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_memcache.CreateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create instance method over HTTP. + + Args: + request (~.cloud_memcache.CreateInstanceRequest): + The request object. Request for + [CreateInstance][google.cloud.memcache.v1.CloudMemcache.CreateInstance]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/instances", + "body": "instance", + }, + ] + request, metadata = self._interceptor.pre_create_instance(request, metadata) + pb_request = cloud_memcache.CreateInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_instance(resp) + return resp + + class _DeleteInstance(CloudMemcacheRestStub): + def __hash__(self): + return hash("DeleteInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_memcache.DeleteInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete instance method over HTTP. + + Args: + request (~.cloud_memcache.DeleteInstanceRequest): + The request object. Request for + [DeleteInstance][google.cloud.memcache.v1.CloudMemcache.DeleteInstance]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/instances/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_instance(request, metadata) + pb_request = cloud_memcache.DeleteInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_instance(resp) + return resp + + class _GetInstance(CloudMemcacheRestStub): + def __hash__(self): + return hash("GetInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_memcache.GetInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_memcache.Instance: + r"""Call the get instance method over HTTP. + + Args: + request (~.cloud_memcache.GetInstanceRequest): + The request object. Request for + [GetInstance][google.cloud.memcache.v1.CloudMemcache.GetInstance]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.cloud_memcache.Instance: + A Memorystore for Memcached instance + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/instances/*}", + }, + ] + request, metadata = self._interceptor.pre_get_instance(request, metadata) + pb_request = cloud_memcache.GetInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_memcache.Instance() + pb_resp = cloud_memcache.Instance.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_instance(resp) + return resp + + class _ListInstances(CloudMemcacheRestStub): + def __hash__(self): + return hash("ListInstances") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_memcache.ListInstancesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_memcache.ListInstancesResponse: + r"""Call the list instances method over HTTP. + + Args: + request (~.cloud_memcache.ListInstancesRequest): + The request object. Request for + [ListInstances][google.cloud.memcache.v1.CloudMemcache.ListInstances]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_memcache.ListInstancesResponse: + Response for + [ListInstances][google.cloud.memcache.v1.CloudMemcache.ListInstances]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/instances", + }, + ] + request, metadata = self._interceptor.pre_list_instances(request, metadata) + pb_request = cloud_memcache.ListInstancesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_memcache.ListInstancesResponse() + pb_resp = cloud_memcache.ListInstancesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_instances(resp) + return resp + + class _RescheduleMaintenance(CloudMemcacheRestStub): + def __hash__(self): + return hash("RescheduleMaintenance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_memcache.RescheduleMaintenanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the reschedule maintenance method over HTTP. + + Args: + request (~.cloud_memcache.RescheduleMaintenanceRequest): + The request object. Request for + [RescheduleMaintenance][google.cloud.memcache.v1.CloudMemcache.RescheduleMaintenance]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{instance=projects/*/locations/*/instances/*}:rescheduleMaintenance", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_reschedule_maintenance( + request, metadata + ) + pb_request = cloud_memcache.RescheduleMaintenanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_reschedule_maintenance(resp) + return resp + + class _UpdateInstance(CloudMemcacheRestStub): + def __hash__(self): + return hash("UpdateInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_memcache.UpdateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update instance method over HTTP. + + Args: + request (~.cloud_memcache.UpdateInstanceRequest): + The request object. Request for + [UpdateInstance][google.cloud.memcache.v1.CloudMemcache.UpdateInstance]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{instance.name=projects/*/locations/*/instances/*}", + "body": "instance", + }, + ] + request, metadata = self._interceptor.pre_update_instance(request, metadata) + pb_request = cloud_memcache.UpdateInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_instance(resp) + return resp + + class _UpdateParameters(CloudMemcacheRestStub): + def __hash__(self): + return hash("UpdateParameters") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_memcache.UpdateParametersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update parameters method over HTTP. + + Args: + request (~.cloud_memcache.UpdateParametersRequest): + The request object. Request for + [UpdateParameters][google.cloud.memcache.v1.CloudMemcache.UpdateParameters]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{name=projects/*/locations/*/instances/*}:updateParameters", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_update_parameters( + request, metadata + ) + pb_request = cloud_memcache.UpdateParametersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_parameters(resp) + return resp + + @property + def apply_parameters( + self, + ) -> Callable[[cloud_memcache.ApplyParametersRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ApplyParameters(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_instance( + self, + ) -> Callable[[cloud_memcache.CreateInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_instance( + self, + ) -> Callable[[cloud_memcache.DeleteInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_instance( + self, + ) -> Callable[[cloud_memcache.GetInstanceRequest], cloud_memcache.Instance]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_instances( + self, + ) -> Callable[ + [cloud_memcache.ListInstancesRequest], cloud_memcache.ListInstancesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def reschedule_maintenance( + self, + ) -> Callable[ + [cloud_memcache.RescheduleMaintenanceRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._RescheduleMaintenance(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_instance( + self, + ) -> Callable[[cloud_memcache.UpdateInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_parameters( + self, + ) -> Callable[[cloud_memcache.UpdateParametersRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateParameters(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(CloudMemcacheRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(CloudMemcacheRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(CloudMemcacheRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.loads(json.dumps(transcoded_request["body"])) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(CloudMemcacheRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the delete operation method over HTTP. 
+ + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(CloudMemcacheRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. 
+ + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(CloudMemcacheRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("CloudMemcacheRestTransport",) diff --git a/google/cloud/memcache_v1beta2/gapic_metadata.json b/google/cloud/memcache_v1beta2/gapic_metadata.json index 4143466..265ae99 100644 --- a/google/cloud/memcache_v1beta2/gapic_metadata.json +++ b/google/cloud/memcache_v1beta2/gapic_metadata.json @@ -106,6 +106,56 @@ ] } } + }, + "rest": { + "libraryClient": "CloudMemcacheClient", + "rpcs": { + "ApplyParameters": { + "methods": [ + "apply_parameters" + ] + }, + "ApplySoftwareUpdate": { + "methods": [ + "apply_software_update" + ] + }, + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + 
"GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "RescheduleMaintenance": { + "methods": [ + "reschedule_maintenance" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + }, + "UpdateParameters": { + "methods": [ + "update_parameters" + ] + } + } } } } diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index 865ea44..6f877bd 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -60,6 +60,7 @@ from .transports.base import DEFAULT_CLIENT_INFO, CloudMemcacheTransport from .transports.grpc import CloudMemcacheGrpcTransport from .transports.grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport +from .transports.rest import CloudMemcacheRestTransport class CloudMemcacheClientMeta(type): @@ -73,6 +74,7 @@ class CloudMemcacheClientMeta(type): _transport_registry = OrderedDict() # type: Dict[str, Type[CloudMemcacheTransport]] _transport_registry["grpc"] = CloudMemcacheGrpcTransport _transport_registry["grpc_asyncio"] = CloudMemcacheGrpcAsyncIOTransport + _transport_registry["rest"] = CloudMemcacheRestTransport def get_transport_class( cls, diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py index ab7c86d..8e6a821 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py @@ -19,14 +19,18 @@ from .base import CloudMemcacheTransport from .grpc import CloudMemcacheGrpcTransport from .grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport +from .rest import CloudMemcacheRestInterceptor, CloudMemcacheRestTransport # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[CloudMemcacheTransport]] _transport_registry["grpc"] = CloudMemcacheGrpcTransport _transport_registry["grpc_asyncio"] = CloudMemcacheGrpcAsyncIOTransport +_transport_registry["rest"] = CloudMemcacheRestTransport __all__ = ( "CloudMemcacheTransport", "CloudMemcacheGrpcTransport", "CloudMemcacheGrpcAsyncIOTransport", + "CloudMemcacheRestTransport", + "CloudMemcacheRestInterceptor", ) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/rest.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/rest.py new file mode 100644 index 0000000..d5c5170 --- /dev/null +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/rest.py @@ -0,0 +1,2018 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.memcache_v1beta2.types import cloud_memcache + +from .base import CloudMemcacheTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class CloudMemcacheRestInterceptor: + """Interceptor for CloudMemcache. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the CloudMemcacheRestTransport. + + .. 
code-block:: python + class MyCustomCloudMemcacheInterceptor(CloudMemcacheRestInterceptor): + def pre_apply_parameters(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_apply_parameters(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_apply_software_update(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_apply_software_update(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_instances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_reschedule_maintenance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_reschedule_maintenance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_instance(self, response): + logging.log(f"Received response: 
{response}") + return response + + def pre_update_parameters(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_parameters(self, response): + logging.log(f"Received response: {response}") + return response + + transport = CloudMemcacheRestTransport(interceptor=MyCustomCloudMemcacheInterceptor()) + client = CloudMemcacheClient(transport=transport) + + + """ + + def pre_apply_parameters( + self, + request: cloud_memcache.ApplyParametersRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_memcache.ApplyParametersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for apply_parameters + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudMemcache server. + """ + return request, metadata + + def post_apply_parameters( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for apply_parameters + + Override in a subclass to manipulate the response + after it is returned by the CloudMemcache server but before + it is returned to user code. + """ + return response + + def pre_apply_software_update( + self, + request: cloud_memcache.ApplySoftwareUpdateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_memcache.ApplySoftwareUpdateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for apply_software_update + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudMemcache server. + """ + return request, metadata + + def post_apply_software_update( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for apply_software_update + + Override in a subclass to manipulate the response + after it is returned by the CloudMemcache server but before + it is returned to user code. 
+ """ + return response + + def pre_create_instance( + self, + request: cloud_memcache.CreateInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_memcache.CreateInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudMemcache server. + """ + return request, metadata + + def post_create_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_instance + + Override in a subclass to manipulate the response + after it is returned by the CloudMemcache server but before + it is returned to user code. + """ + return response + + def pre_delete_instance( + self, + request: cloud_memcache.DeleteInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_memcache.DeleteInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudMemcache server. + """ + return request, metadata + + def post_delete_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_instance + + Override in a subclass to manipulate the response + after it is returned by the CloudMemcache server but before + it is returned to user code. + """ + return response + + def pre_get_instance( + self, + request: cloud_memcache.GetInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_memcache.GetInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudMemcache server. 
+ """ + return request, metadata + + def post_get_instance( + self, response: cloud_memcache.Instance + ) -> cloud_memcache.Instance: + """Post-rpc interceptor for get_instance + + Override in a subclass to manipulate the response + after it is returned by the CloudMemcache server but before + it is returned to user code. + """ + return response + + def pre_list_instances( + self, + request: cloud_memcache.ListInstancesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_memcache.ListInstancesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudMemcache server. + """ + return request, metadata + + def post_list_instances( + self, response: cloud_memcache.ListInstancesResponse + ) -> cloud_memcache.ListInstancesResponse: + """Post-rpc interceptor for list_instances + + Override in a subclass to manipulate the response + after it is returned by the CloudMemcache server but before + it is returned to user code. + """ + return response + + def pre_reschedule_maintenance( + self, + request: cloud_memcache.RescheduleMaintenanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_memcache.RescheduleMaintenanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for reschedule_maintenance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudMemcache server. + """ + return request, metadata + + def post_reschedule_maintenance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for reschedule_maintenance + + Override in a subclass to manipulate the response + after it is returned by the CloudMemcache server but before + it is returned to user code. 
+ """ + return response + + def pre_update_instance( + self, + request: cloud_memcache.UpdateInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_memcache.UpdateInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudMemcache server. + """ + return request, metadata + + def post_update_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_instance + + Override in a subclass to manipulate the response + after it is returned by the CloudMemcache server but before + it is returned to user code. + """ + return response + + def pre_update_parameters( + self, + request: cloud_memcache.UpdateParametersRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_memcache.UpdateParametersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_parameters + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudMemcache server. + """ + return request, metadata + + def post_update_parameters( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_parameters + + Override in a subclass to manipulate the response + after it is returned by the CloudMemcache server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> locations_pb2.Location: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudMemcache server. 
+ """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.GetLocationRequest + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the CloudMemcache server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> locations_pb2.ListLocationsResponse: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudMemcache server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsRequest + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the CloudMemcache server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> None: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudMemcache server. + """ + return request, metadata + + def post_cancel_operation( + self, response: operations_pb2.CancelOperationRequest + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudMemcache server but before + it is returned to user code. 
+ """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> None: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudMemcache server. + """ + return request, metadata + + def post_delete_operation( + self, response: operations_pb2.DeleteOperationRequest + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudMemcache server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> operations_pb2.Operation: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudMemcache server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.GetOperationRequest + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudMemcache server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> operations_pb2.ListOperationsResponse: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudMemcache server. 
+ """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsRequest + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the CloudMemcache server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class CloudMemcacheRestStub: + _session: AuthorizedSession + _host: str + _interceptor: CloudMemcacheRestInterceptor + + +class CloudMemcacheRestTransport(CloudMemcacheTransport): + """REST backend transport for CloudMemcache. + + Configures and manages Cloud Memorystore for Memcached instances. + + The ``memcache.googleapis.com`` service implements the Google Cloud + Memorystore for Memcached API and defines the following resource + model for managing Memorystore Memcached (also called Memcached + below) instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Memcached instances, named: + ``/instances/*`` + - As such, Memcached instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be a GCP ``region``; for example: + + - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached`` + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "memcache.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[CloudMemcacheRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or CloudMemcacheRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1beta2/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1beta2/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1beta2/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1beta2/{name=projects/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1beta2", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _ApplyParameters(CloudMemcacheRestStub): + def __hash__(self): + return hash("ApplyParameters") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_memcache.ApplyParametersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the apply parameters method over HTTP. + + Args: + request (~.cloud_memcache.ApplyParametersRequest): + The request object. Request for + [ApplyParameters][google.cloud.memcache.v1beta2.CloudMemcache.ApplyParameters]. 
+ + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta2/{name=projects/*/locations/*/instances/*}:applyParameters", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_apply_parameters( + request, metadata + ) + pb_request = cloud_memcache.ApplyParametersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_apply_parameters(resp) + return resp + + class _ApplySoftwareUpdate(CloudMemcacheRestStub): + def __hash__(self): + return hash("ApplySoftwareUpdate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_memcache.ApplySoftwareUpdateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the apply software update method over HTTP. + + Args: + request (~.cloud_memcache.ApplySoftwareUpdateRequest): + The request object. Request for + [ApplySoftwareUpdate][google.cloud.memcache.v1beta2.CloudMemcache.ApplySoftwareUpdate]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta2/{instance=projects/*/locations/*/instances/*}:applySoftwareUpdate", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_apply_software_update( + request, metadata + ) + pb_request = cloud_memcache.ApplySoftwareUpdateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_apply_software_update(resp) + return resp + + class _CreateInstance(CloudMemcacheRestStub): + def __hash__(self): + return hash("CreateInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + "instanceId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_memcache.CreateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create instance method over HTTP. + + Args: + request (~.cloud_memcache.CreateInstanceRequest): + The request object. Request for + [CreateInstance][google.cloud.memcache.v1beta2.CloudMemcache.CreateInstance]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta2/{parent=projects/*/locations/*}/instances", + "body": "resource", + }, + ] + request, metadata = self._interceptor.pre_create_instance(request, metadata) + pb_request = cloud_memcache.CreateInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_instance(resp) + return resp + + class _DeleteInstance(CloudMemcacheRestStub): + def __hash__(self): + return hash("DeleteInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_memcache.DeleteInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete instance method over HTTP. + + Args: + request (~.cloud_memcache.DeleteInstanceRequest): + The request object. Request for + [DeleteInstance][google.cloud.memcache.v1beta2.CloudMemcache.DeleteInstance]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta2/{name=projects/*/locations/*/instances/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_instance(request, metadata) + pb_request = cloud_memcache.DeleteInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_instance(resp) + return resp + + class _GetInstance(CloudMemcacheRestStub): + def __hash__(self): + return hash("GetInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_memcache.GetInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_memcache.Instance: + r"""Call the get instance method over HTTP. + + Args: + request (~.cloud_memcache.GetInstanceRequest): + The request object. Request for + [GetInstance][google.cloud.memcache.v1beta2.CloudMemcache.GetInstance]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.cloud_memcache.Instance: + A Memorystore for Memcached instance + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta2/{name=projects/*/locations/*/instances/*}", + }, + ] + request, metadata = self._interceptor.pre_get_instance(request, metadata) + pb_request = cloud_memcache.GetInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_memcache.Instance() + pb_resp = cloud_memcache.Instance.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_instance(resp) + return resp + + class _ListInstances(CloudMemcacheRestStub): + def __hash__(self): + return hash("ListInstances") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_memcache.ListInstancesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_memcache.ListInstancesResponse: + r"""Call the list instances method over HTTP. + + Args: + request (~.cloud_memcache.ListInstancesRequest): + The request object. Request for + [ListInstances][google.cloud.memcache.v1beta2.CloudMemcache.ListInstances]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_memcache.ListInstancesResponse: + Response for + [ListInstances][google.cloud.memcache.v1beta2.CloudMemcache.ListInstances]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta2/{parent=projects/*/locations/*}/instances", + }, + ] + request, metadata = self._interceptor.pre_list_instances(request, metadata) + pb_request = cloud_memcache.ListInstancesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_memcache.ListInstancesResponse() + pb_resp = cloud_memcache.ListInstancesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_instances(resp) + return resp + + class _RescheduleMaintenance(CloudMemcacheRestStub): + def __hash__(self): + return hash("RescheduleMaintenance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_memcache.RescheduleMaintenanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the reschedule maintenance method over HTTP. + + Args: + request (~.cloud_memcache.RescheduleMaintenanceRequest): + The request object. Request for + [RescheduleMaintenance][google.cloud.memcache.v1beta2.CloudMemcache.RescheduleMaintenance]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta2/{instance=projects/*/locations/*/instances/*}:rescheduleMaintenance", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_reschedule_maintenance( + request, metadata + ) + pb_request = cloud_memcache.RescheduleMaintenanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_reschedule_maintenance(resp) + return resp + + class _UpdateInstance(CloudMemcacheRestStub): + def __hash__(self): + return hash("UpdateInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_memcache.UpdateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update instance method over HTTP. + + Args: + request (~.cloud_memcache.UpdateInstanceRequest): + The request object. Request for + [UpdateInstance][google.cloud.memcache.v1beta2.CloudMemcache.UpdateInstance]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1beta2/{resource.name=projects/*/locations/*/instances/*}", + "body": "resource", + }, + ] + request, metadata = self._interceptor.pre_update_instance(request, metadata) + pb_request = cloud_memcache.UpdateInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_instance(resp) + return resp + + class _UpdateParameters(CloudMemcacheRestStub): + def __hash__(self): + return hash("UpdateParameters") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_memcache.UpdateParametersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update parameters method over HTTP. + + Args: + request (~.cloud_memcache.UpdateParametersRequest): + The request object. Request for + [UpdateParameters][google.cloud.memcache.v1beta2.CloudMemcache.UpdateParameters]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1beta2/{name=projects/*/locations/*/instances/*}:updateParameters", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_update_parameters( + request, metadata + ) + pb_request = cloud_memcache.UpdateParametersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_parameters(resp) + return resp + + @property + def apply_parameters( + self, + ) -> Callable[[cloud_memcache.ApplyParametersRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ApplyParameters(self._session, self._host, self._interceptor) # type: ignore + + @property + def apply_software_update( + self, + ) -> Callable[ + [cloud_memcache.ApplySoftwareUpdateRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ApplySoftwareUpdate(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_instance( + self, + ) -> Callable[[cloud_memcache.CreateInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_instance( + self, + ) -> Callable[[cloud_memcache.DeleteInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_instance( + self, + ) -> Callable[[cloud_memcache.GetInstanceRequest], cloud_memcache.Instance]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_instances( + self, + ) -> Callable[ + [cloud_memcache.ListInstancesRequest], cloud_memcache.ListInstancesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def reschedule_maintenance( + self, + ) -> Callable[ + [cloud_memcache.RescheduleMaintenanceRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RescheduleMaintenance(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_instance( + self, + ) -> Callable[[cloud_memcache.UpdateInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_parameters( + self, + ) -> Callable[[cloud_memcache.UpdateParametersRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateParameters(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(CloudMemcacheRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta2/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(CloudMemcacheRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta2/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(CloudMemcacheRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. 
+ + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta2/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.loads(json.dumps(transcoded_request["body"])) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(CloudMemcacheRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta2/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(CloudMemcacheRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta2/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(CloudMemcacheRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta2/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("CloudMemcacheRestTransport",) diff --git a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py index 6c9e884..65c1125 100644 --- a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -22,6 +22,8 @@ except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math from google.api_core import ( @@ -45,6 +47,7 @@ from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import 
json_format from google.protobuf import timestamp_pb2 # type: ignore from google.type import dayofweek_pb2 # type: ignore from google.type import timeofday_pb2 # type: ignore @@ -53,6 +56,8 @@ from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session from google.cloud.memcache_v1.services.cloud_memcache import ( CloudMemcacheAsyncClient, @@ -112,6 +117,7 @@ def test__get_default_mtls_endpoint(): [ (CloudMemcacheClient, "grpc"), (CloudMemcacheAsyncClient, "grpc_asyncio"), + (CloudMemcacheClient, "rest"), ], ) def test_cloud_memcache_client_from_service_account_info(client_class, transport_name): @@ -125,7 +131,11 @@ def test_cloud_memcache_client_from_service_account_info(client_class, transport assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("memcache.googleapis.com:443") + assert client.transport._host == ( + "memcache.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://memcache.googleapis.com" + ) @pytest.mark.parametrize( @@ -133,6 +143,7 @@ def test_cloud_memcache_client_from_service_account_info(client_class, transport [ (transports.CloudMemcacheGrpcTransport, "grpc"), (transports.CloudMemcacheGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.CloudMemcacheRestTransport, "rest"), ], ) def test_cloud_memcache_client_service_account_always_use_jwt( @@ -158,6 +169,7 @@ def test_cloud_memcache_client_service_account_always_use_jwt( [ (CloudMemcacheClient, "grpc"), (CloudMemcacheAsyncClient, "grpc_asyncio"), + (CloudMemcacheClient, "rest"), ], ) def test_cloud_memcache_client_from_service_account_file(client_class, transport_name): @@ -178,13 +190,18 @@ def test_cloud_memcache_client_from_service_account_file(client_class, transport assert client.transport._credentials == creds assert isinstance(client, 
client_class) - assert client.transport._host == ("memcache.googleapis.com:443") + assert client.transport._host == ( + "memcache.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://memcache.googleapis.com" + ) def test_cloud_memcache_client_get_transport_class(): transport = CloudMemcacheClient.get_transport_class() available_transports = [ transports.CloudMemcacheGrpcTransport, + transports.CloudMemcacheRestTransport, ] assert transport in available_transports @@ -201,6 +218,7 @@ def test_cloud_memcache_client_get_transport_class(): transports.CloudMemcacheGrpcAsyncIOTransport, "grpc_asyncio", ), + (CloudMemcacheClient, transports.CloudMemcacheRestTransport, "rest"), ], ) @mock.patch.object( @@ -346,6 +364,8 @@ def test_cloud_memcache_client_client_options( "grpc_asyncio", "false", ), + (CloudMemcacheClient, transports.CloudMemcacheRestTransport, "rest", "true"), + (CloudMemcacheClient, transports.CloudMemcacheRestTransport, "rest", "false"), ], ) @mock.patch.object( @@ -545,6 +565,7 @@ def test_cloud_memcache_client_get_mtls_endpoint_and_cert_source(client_class): transports.CloudMemcacheGrpcAsyncIOTransport, "grpc_asyncio", ), + (CloudMemcacheClient, transports.CloudMemcacheRestTransport, "rest"), ], ) def test_cloud_memcache_client_client_options_scopes( @@ -585,6 +606,7 @@ def test_cloud_memcache_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + (CloudMemcacheClient, transports.CloudMemcacheRestTransport, "rest", None), ], ) def test_cloud_memcache_client_client_options_credentials_file( @@ -2860,186 +2882,2672 @@ async def test_reschedule_maintenance_flattened_error_async(): ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
- transport = transports.CloudMemcacheGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.ListInstancesRequest, + dict, + ], +) +def test_list_instances_rest(request_type): + client = CloudMemcacheClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.CloudMemcacheGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudMemcacheClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_memcache.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - # It is an error to provide an api_key and a transport instance. 
- transport = transports.CloudMemcacheGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CloudMemcacheClient( - client_options=options, - transport=transport, + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_memcache.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_instances(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_instances_rest_required_fields( + request_type=cloud_memcache.ListInstancesRequest, +): + transport_class = transports.CloudMemcacheRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, ) + ) - # It is an error to provide an api_key and a credential. 
- options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CloudMemcacheClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", ) + ) + jsonified_request.update(unset_fields) - # It is an error to provide scopes and a transport instance. - transport = transports.CloudMemcacheGrpcTransport( + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudMemcacheClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_memcache.ListInstancesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloud_memcache.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_instances(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_instances_rest_unset_required_fields(): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - with pytest.raises(ValueError): - client = CloudMemcacheClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + + unset_fields = transport.list_instances._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) ) + & set(("parent",)) + ) -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.CloudMemcacheGrpcTransport( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_instances_rest_interceptors(null_interceptor): + transport = transports.CloudMemcacheRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudMemcacheRestInterceptor(), ) client = CloudMemcacheClient(transport=transport) - assert client.transport is transport + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_list_instances" + ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "pre_list_instances" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_memcache.ListInstancesRequest.pb( + cloud_memcache.ListInstancesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_memcache.ListInstancesResponse.to_json( + cloud_memcache.ListInstancesResponse() + ) + request = cloud_memcache.ListInstancesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_memcache.ListInstancesResponse() -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.CloudMemcacheGrpcTransport( + client.list_instances( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_instances_rest_bad_request( + transport: str = "rest", request_type=cloud_memcache.ListInstancesRequest +): + client = CloudMemcacheClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - channel = transport.grpc_channel - assert channel - transport = transports.CloudMemcacheGrpcAsyncIOTransport( + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_instances(request) + + +def test_list_instances_rest_flattened(): + client = CloudMemcacheClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - channel = transport.grpc_channel - assert channel + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_memcache.ListInstancesResponse() -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudMemcacheGrpcTransport, - transports.CloudMemcacheGrpcAsyncIOTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - ], -) -def test_transport_kind(transport_name): - transport = CloudMemcacheClient.get_transport_class(transport_name)( + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_memcache.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_instances(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, + args[1], + ) + + +def test_list_instances_rest_flattened_error(transport: str = "rest"): + client = CloudMemcacheClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - assert transport.kind == transport_name + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_instances( + cloud_memcache.ListInstancesRequest(), + parent="parent_value", + ) -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. 
+def test_list_instances_rest_pager(transport: str = "rest"): client = CloudMemcacheClient( credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.CloudMemcacheGrpcTransport, + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_memcache.ListInstancesResponse( + instances=[ + cloud_memcache.Instance(), + cloud_memcache.Instance(), + cloud_memcache.Instance(), + ], + next_page_token="abc", + ), + cloud_memcache.ListInstancesResponse( + instances=[], + next_page_token="def", + ), + cloud_memcache.ListInstancesResponse( + instances=[ + cloud_memcache.Instance(), + ], + next_page_token="ghi", + ), + cloud_memcache.ListInstancesResponse( + instances=[ + cloud_memcache.Instance(), + cloud_memcache.Instance(), + ], + ), + ) + # Two responses for two calls + response = response + response -def test_cloud_memcache_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.CloudMemcacheTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", + # Wrap the values into proper Response objs + response = tuple( + cloud_memcache.ListInstancesResponse.to_json(x) for x in response ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + sample_request = {"parent": "projects/sample1/locations/sample2"} -def test_cloud_memcache_base_transport(): - # Instantiate the base transport. 
- with mock.patch( - "google.cloud.memcache_v1.services.cloud_memcache.transports.CloudMemcacheTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.CloudMemcacheTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) + pager = client.list_instances(request=sample_request) - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "list_instances", - "get_instance", - "create_instance", - "update_instance", - "update_parameters", - "delete_instance", - "apply_parameters", - "reschedule_maintenance", - "get_location", - "list_locations", - "get_operation", - "cancel_operation", - "delete_operation", - "list_operations", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_memcache.Instance) for i in results) - with pytest.raises(NotImplementedError): - transport.close() + pages = list(client.list_instances(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.GetInstanceRequest, + dict, + ], +) +def test_get_instance_rest(request_type): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) -def 
test_cloud_memcache_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.memcache_v1.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" - ) as Transport: + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_memcache.Instance( + name="name_value", + display_name="display_name_value", + authorized_network="authorized_network_value", + zones=["zones_value"], + node_count=1070, + memcache_version=cloud_memcache.MemcacheVersion.MEMCACHE_1_5, + state=cloud_memcache.Instance.State.CREATING, + memcache_full_version="memcache_full_version_value", + discovery_endpoint="discovery_endpoint_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_memcache.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_instance(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_memcache.Instance) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.authorized_network == "authorized_network_value" + assert response.zones == ["zones_value"] + assert response.node_count == 1070 + assert response.memcache_version == cloud_memcache.MemcacheVersion.MEMCACHE_1_5 + assert response.state == cloud_memcache.Instance.State.CREATING + assert response.memcache_full_version == "memcache_full_version_value" + assert response.discovery_endpoint == "discovery_endpoint_value" + + +def test_get_instance_rest_required_fields( + request_type=cloud_memcache.GetInstanceRequest, +): + transport_class = transports.CloudMemcacheRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = cloud_memcache.Instance() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloud_memcache.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_instance_rest_unset_required_fields(): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_instance_rest_interceptors(null_interceptor): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudMemcacheRestInterceptor(), + ) + client = CloudMemcacheClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, 
"transcode" + ) as transcode, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_get_instance" + ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "pre_get_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_memcache.GetInstanceRequest.pb( + cloud_memcache.GetInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_memcache.Instance.to_json( + cloud_memcache.Instance() + ) + + request = cloud_memcache.GetInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_memcache.Instance() + + client.get_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_instance_rest_bad_request( + transport: str = "rest", request_type=cloud_memcache.GetInstanceRequest +): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_instance(request) + + +def test_get_instance_rest_flattened(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_memcache.Instance() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_memcache.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, + args[1], + ) + + +def test_get_instance_rest_flattened_error(transport: str = "rest"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_instance( + cloud_memcache.GetInstanceRequest(), + name="name_value", + ) + + +def test_get_instance_rest_error(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.CreateInstanceRequest, + dict, + ], +) +def test_create_instance_rest(request_type): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["instance"] = { + "name": "name_value", + "display_name": "display_name_value", + "labels": {}, + "authorized_network": "authorized_network_value", + "zones": ["zones_value1", "zones_value2"], + "node_count": 1070, + "node_config": {"cpu_count": 976, "memory_size_mb": 1505}, + "memcache_version": 1, + "parameters": {"id": "id_value", "params": {}}, + "memcache_nodes": [ + { + "node_id": "node_id_value", + "zone": "zone_value", + "state": 1, + "host": "host_value", + "port": 453, + "parameters": {}, + } + ], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "state": 1, + "memcache_full_version": "memcache_full_version_value", + "instance_messages": [{"code": 1, "message": "message_value"}], + "discovery_endpoint": "discovery_endpoint_value", + "maintenance_policy": { + "create_time": {}, + "update_time": {}, + "description": "description_value", + "weekly_maintenance_window": [ + { + "day": 1, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "duration": {"seconds": 751, "nanos": 543}, + } + ], + }, + "maintenance_schedule": { + "start_time": {}, + "end_time": {}, + "schedule_deadline_time": {}, + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_instance(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_instance_rest_required_fields( + request_type=cloud_memcache.CreateInstanceRequest, +): + transport_class = transports.CloudMemcacheRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["instance_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "instanceId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == request_init["instance_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["instanceId"] = "instance_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("instance_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == "instance_id_value" + + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_instance(request) + + expected_params = [ + ( + "instanceId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_instance_rest_unset_required_fields(): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_instance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("instanceId",)) + & set( + ( + "parent", + "instanceId", + "instance", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_instance_rest_interceptors(null_interceptor): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudMemcacheRestInterceptor(), + ) + client = CloudMemcacheClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_create_instance" + ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "pre_create_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_memcache.CreateInstanceRequest.pb( + 
cloud_memcache.CreateInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloud_memcache.CreateInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_instance_rest_bad_request( + transport: str = "rest", request_type=cloud_memcache.CreateInstanceRequest +): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["instance"] = { + "name": "name_value", + "display_name": "display_name_value", + "labels": {}, + "authorized_network": "authorized_network_value", + "zones": ["zones_value1", "zones_value2"], + "node_count": 1070, + "node_config": {"cpu_count": 976, "memory_size_mb": 1505}, + "memcache_version": 1, + "parameters": {"id": "id_value", "params": {}}, + "memcache_nodes": [ + { + "node_id": "node_id_value", + "zone": "zone_value", + "state": 1, + "host": "host_value", + "port": 453, + "parameters": {}, + } + ], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "state": 1, + "memcache_full_version": "memcache_full_version_value", + "instance_messages": [{"code": 1, "message": "message_value"}], + "discovery_endpoint": "discovery_endpoint_value", + "maintenance_policy": { + "create_time": {}, + "update_time": {}, + "description": "description_value", + 
"weekly_maintenance_window": [ + { + "day": 1, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "duration": {"seconds": 751, "nanos": 543}, + } + ], + }, + "maintenance_schedule": { + "start_time": {}, + "end_time": {}, + "schedule_deadline_time": {}, + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_instance(request) + + +def test_create_instance_rest_flattened(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + instance=cloud_memcache.Instance(name="name_value"), + instance_id="instance_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, + args[1], + ) + + +def test_create_instance_rest_flattened_error(transport: str = "rest"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_instance( + cloud_memcache.CreateInstanceRequest(), + parent="parent_value", + instance=cloud_memcache.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +def test_create_instance_rest_error(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.UpdateInstanceRequest, + dict, + ], +) +def test_update_instance_rest(request_type): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } + request_init["instance"] = { + "name": "projects/sample1/locations/sample2/instances/sample3", + "display_name": "display_name_value", + "labels": {}, + "authorized_network": "authorized_network_value", + "zones": ["zones_value1", "zones_value2"], + "node_count": 1070, + "node_config": {"cpu_count": 976, "memory_size_mb": 1505}, + "memcache_version": 1, + "parameters": {"id": "id_value", "params": {}}, + "memcache_nodes": [ + { + "node_id": "node_id_value", + "zone": "zone_value", + "state": 1, + "host": "host_value", + "port": 453, + "parameters": {}, + } + ], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "state": 1, + "memcache_full_version": "memcache_full_version_value", + 
"instance_messages": [{"code": 1, "message": "message_value"}], + "discovery_endpoint": "discovery_endpoint_value", + "maintenance_policy": { + "create_time": {}, + "update_time": {}, + "description": "description_value", + "weekly_maintenance_window": [ + { + "day": 1, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "duration": {"seconds": 751, "nanos": 543}, + } + ], + }, + "maintenance_schedule": { + "start_time": {}, + "end_time": {}, + "schedule_deadline_time": {}, + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_instance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_update_instance_rest_required_fields( + request_type=cloud_memcache.UpdateInstanceRequest, +): + transport_class = transports.CloudMemcacheRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_instance_rest_unset_required_fields(): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_instance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "updateMask", + "instance", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_instance_rest_interceptors(null_interceptor): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudMemcacheRestInterceptor(), + ) + client = CloudMemcacheClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_update_instance" + ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "pre_update_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_memcache.UpdateInstanceRequest.pb( + cloud_memcache.UpdateInstanceRequest() + ) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloud_memcache.UpdateInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_instance_rest_bad_request( + transport: str = "rest", request_type=cloud_memcache.UpdateInstanceRequest +): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } + request_init["instance"] = { + "name": "projects/sample1/locations/sample2/instances/sample3", + "display_name": "display_name_value", + "labels": {}, + "authorized_network": "authorized_network_value", + "zones": ["zones_value1", "zones_value2"], + "node_count": 1070, + "node_config": {"cpu_count": 976, "memory_size_mb": 1505}, + "memcache_version": 1, + "parameters": {"id": "id_value", "params": {}}, + "memcache_nodes": [ + { + "node_id": "node_id_value", + "zone": "zone_value", + "state": 1, + "host": "host_value", + "port": 453, + "parameters": {}, + } + ], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "state": 1, + "memcache_full_version": "memcache_full_version_value", + "instance_messages": [{"code": 1, "message": "message_value"}], + "discovery_endpoint": "discovery_endpoint_value", + "maintenance_policy": { + "create_time": {}, + "update_time": {}, + "description": "description_value", 
+ "weekly_maintenance_window": [ + { + "day": 1, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "duration": {"seconds": 751, "nanos": 543}, + } + ], + }, + "maintenance_schedule": { + "start_time": {}, + "end_time": {}, + "schedule_deadline_time": {}, + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_instance(request) + + +def test_update_instance_rest_flattened(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + instance=cloud_memcache.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{instance.name=projects/*/locations/*/instances/*}" + % client.transport._host, + args[1], + ) + + +def test_update_instance_rest_flattened_error(transport: str = "rest"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_instance( + cloud_memcache.UpdateInstanceRequest(), + instance=cloud_memcache.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_instance_rest_error(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.UpdateParametersRequest, + dict, + ], +) +def test_update_parameters_rest(request_type): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_parameters(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_update_parameters_rest_required_fields( + request_type=cloud_memcache.UpdateParametersRequest, +): + transport_class = transports.CloudMemcacheRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_parameters._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_parameters._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_parameters(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_parameters_rest_unset_required_fields(): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_parameters._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_parameters_rest_interceptors(null_interceptor): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudMemcacheRestInterceptor(), + ) + client = CloudMemcacheClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_update_parameters" + ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "pre_update_parameters" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_memcache.UpdateParametersRequest.pb( + cloud_memcache.UpdateParametersRequest() + ) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloud_memcache.UpdateParametersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_parameters( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_parameters_rest_bad_request( + transport: str = "rest", request_type=cloud_memcache.UpdateParametersRequest +): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_parameters(request) + + +def test_update_parameters_rest_flattened(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parameters=cloud_memcache.MemcacheParameters(id="id_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_parameters(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*}:updateParameters" + % client.transport._host, + args[1], + ) + + +def test_update_parameters_rest_flattened_error(transport: str = "rest"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_parameters( + cloud_memcache.UpdateParametersRequest(), + name="name_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parameters=cloud_memcache.MemcacheParameters(id="id_value"), + ) + + +def test_update_parameters_rest_error(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.DeleteInstanceRequest, + dict, + ], +) +def test_delete_instance_rest(request_type): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_instance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_instance_rest_required_fields( + request_type=cloud_memcache.DeleteInstanceRequest, +): + transport_class = transports.CloudMemcacheRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_instance_rest_unset_required_fields(): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_instance_rest_interceptors(null_interceptor): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudMemcacheRestInterceptor(), + ) + client = CloudMemcacheClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_delete_instance" + ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "pre_delete_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_memcache.DeleteInstanceRequest.pb( + cloud_memcache.DeleteInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloud_memcache.DeleteInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_instance_rest_bad_request( + transport: str = "rest", request_type=cloud_memcache.DeleteInstanceRequest +): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_instance(request) + + +def test_delete_instance_rest_flattened(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, + args[1], + ) + + +def test_delete_instance_rest_flattened_error(transport: str = "rest"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_instance( + cloud_memcache.DeleteInstanceRequest(), + name="name_value", + ) + + +def test_delete_instance_rest_error(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.ApplyParametersRequest, + dict, + ], +) +def test_apply_parameters_rest(request_type): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.apply_parameters(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_apply_parameters_rest_required_fields( + request_type=cloud_memcache.ApplyParametersRequest, +): + transport_class = transports.CloudMemcacheRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).apply_parameters._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).apply_parameters._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.apply_parameters(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_apply_parameters_rest_unset_required_fields(): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.apply_parameters._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_apply_parameters_rest_interceptors(null_interceptor): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudMemcacheRestInterceptor(), + ) + client = CloudMemcacheClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_apply_parameters" + ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "pre_apply_parameters" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_memcache.ApplyParametersRequest.pb( + cloud_memcache.ApplyParametersRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloud_memcache.ApplyParametersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.apply_parameters( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_apply_parameters_rest_bad_request( + transport: str = "rest", request_type=cloud_memcache.ApplyParametersRequest +): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.apply_parameters(request) + + +def test_apply_parameters_rest_flattened(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + node_ids=["node_ids_value"], + apply_all=True, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.apply_parameters(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*}:applyParameters" + % client.transport._host, + args[1], + ) + + +def test_apply_parameters_rest_flattened_error(transport: str = "rest"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.apply_parameters( + cloud_memcache.ApplyParametersRequest(), + name="name_value", + node_ids=["node_ids_value"], + apply_all=True, + ) + + +def test_apply_parameters_rest_error(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.RescheduleMaintenanceRequest, + dict, + ], +) +def test_reschedule_maintenance_rest(request_type): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"instance": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.reschedule_maintenance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_reschedule_maintenance_rest_required_fields( + request_type=cloud_memcache.RescheduleMaintenanceRequest, +): + transport_class = transports.CloudMemcacheRestTransport + + request_init = {} + request_init["instance"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).reschedule_maintenance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = "instance_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).reschedule_maintenance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == "instance_value" + + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.reschedule_maintenance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_reschedule_maintenance_rest_unset_required_fields(): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.reschedule_maintenance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "instance", + "rescheduleType", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_reschedule_maintenance_rest_interceptors(null_interceptor): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudMemcacheRestInterceptor(), + ) + client = CloudMemcacheClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_reschedule_maintenance" + ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, 
"pre_reschedule_maintenance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_memcache.RescheduleMaintenanceRequest.pb( + cloud_memcache.RescheduleMaintenanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloud_memcache.RescheduleMaintenanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.reschedule_maintenance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_reschedule_maintenance_rest_bad_request( + transport: str = "rest", request_type=cloud_memcache.RescheduleMaintenanceRequest +): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"instance": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.reschedule_maintenance(request) + + +def test_reschedule_maintenance_rest_flattened(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "instance": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + instance="instance_value", + reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.reschedule_maintenance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{instance=projects/*/locations/*/instances/*}:rescheduleMaintenance" + % client.transport._host, + args[1], + ) + + +def test_reschedule_maintenance_rest_flattened_error(transport: str = "rest"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.reschedule_maintenance( + cloud_memcache.RescheduleMaintenanceRequest(), + instance="instance_value", + reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + +def test_reschedule_maintenance_rest_error(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CloudMemcacheGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CloudMemcacheGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudMemcacheClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.CloudMemcacheGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudMemcacheClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudMemcacheClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.CloudMemcacheGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudMemcacheClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudMemcacheGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CloudMemcacheClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudMemcacheGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CloudMemcacheGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudMemcacheGrpcTransport, + transports.CloudMemcacheGrpcAsyncIOTransport, + transports.CloudMemcacheRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = CloudMemcacheClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CloudMemcacheGrpcTransport, + ) + + +def test_cloud_memcache_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.CloudMemcacheTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_cloud_memcache_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.memcache_v1.services.cloud_memcache.transports.CloudMemcacheTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.CloudMemcacheTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "list_instances", + "get_instance", + "create_instance", + "update_instance", + "update_parameters", + "delete_instance", + "apply_parameters", + "reschedule_maintenance", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_cloud_memcache_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", 
autospec=True + ) as load_creds, mock.patch( + "google.cloud.memcache_v1.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" + ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.CloudMemcacheTransport( @@ -3102,6 +5610,7 @@ def test_cloud_memcache_transport_auth_adc(transport_class): [ transports.CloudMemcacheGrpcTransport, transports.CloudMemcacheGrpcAsyncIOTransport, + transports.CloudMemcacheRestTransport, ], ) def test_cloud_memcache_transport_auth_gdch_credentials(transport_class): @@ -3199,11 +5708,40 @@ def test_cloud_memcache_grpc_transport_client_cert_source_for_mtls(transport_cla ) +def test_cloud_memcache_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.CloudMemcacheRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_cloud_memcache_rest_lro_client(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_cloud_memcache_host_no_port(transport_name): @@ -3214,7 +5752,11 @@ def test_cloud_memcache_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("memcache.googleapis.com:443") + assert client.transport._host == ( + "memcache.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://memcache.googleapis.com" + ) @pytest.mark.parametrize( @@ -3222,6 +5764,7 @@ def test_cloud_memcache_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_cloud_memcache_host_with_port(transport_name): @@ -3232,7 +5775,54 @@ def test_cloud_memcache_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("memcache.googleapis.com:8000") + assert client.transport._host == ( + "memcache.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://memcache.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_cloud_memcache_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = CloudMemcacheClient( + credentials=creds1, + transport=transport_name, + ) + client2 = CloudMemcacheClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_instances._session + session2 = client2.transport.list_instances._session + assert session1 != session2 + session1 = client1.transport.get_instance._session + session2 = client2.transport.get_instance._session + assert session1 != session2 + session1 = client1.transport.create_instance._session + session2 = client2.transport.create_instance._session + assert session1 != session2 + session1 = client1.transport.update_instance._session + session2 
= client2.transport.update_instance._session + assert session1 != session2 + session1 = client1.transport.update_parameters._session + session2 = client2.transport.update_parameters._session + assert session1 != session2 + session1 = client1.transport.delete_instance._session + session2 = client2.transport.delete_instance._session + assert session1 != session2 + session1 = client1.transport.apply_parameters._session + session2 = client2.transport.apply_parameters._session + assert session1 != session2 + session1 = client1.transport.reschedule_maintenance._session + session2 = client2.transport.reschedule_maintenance._session + assert session1 != session2 def test_cloud_memcache_grpc_transport_channel(): @@ -3559,6 +6149,352 @@ async def test_transport_close_async(): close.assert_called_once() +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + def test_delete_operation(transport: str = "grpc"): client = CloudMemcacheClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4415,6 +7351,7 @@ async def test_get_location_from_dict_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -4432,6 +7369,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: diff --git a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index 8fe7f1d..27b0a13 100644 --- a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -22,6 +22,8 @@ except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math from google.api_core import ( @@ -45,6 +47,7 @@ from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore from google.type import dayofweek_pb2 # type: ignore from google.type import timeofday_pb2 # type: ignore @@ -53,6 +56,8 @@ from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from 
requests.sessions import Session from google.cloud.memcache_v1beta2.services.cloud_memcache import ( CloudMemcacheAsyncClient, @@ -112,6 +117,7 @@ def test__get_default_mtls_endpoint(): [ (CloudMemcacheClient, "grpc"), (CloudMemcacheAsyncClient, "grpc_asyncio"), + (CloudMemcacheClient, "rest"), ], ) def test_cloud_memcache_client_from_service_account_info(client_class, transport_name): @@ -125,7 +131,11 @@ def test_cloud_memcache_client_from_service_account_info(client_class, transport assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("memcache.googleapis.com:443") + assert client.transport._host == ( + "memcache.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://memcache.googleapis.com" + ) @pytest.mark.parametrize( @@ -133,6 +143,7 @@ def test_cloud_memcache_client_from_service_account_info(client_class, transport [ (transports.CloudMemcacheGrpcTransport, "grpc"), (transports.CloudMemcacheGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.CloudMemcacheRestTransport, "rest"), ], ) def test_cloud_memcache_client_service_account_always_use_jwt( @@ -158,6 +169,7 @@ def test_cloud_memcache_client_service_account_always_use_jwt( [ (CloudMemcacheClient, "grpc"), (CloudMemcacheAsyncClient, "grpc_asyncio"), + (CloudMemcacheClient, "rest"), ], ) def test_cloud_memcache_client_from_service_account_file(client_class, transport_name): @@ -178,13 +190,18 @@ def test_cloud_memcache_client_from_service_account_file(client_class, transport assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("memcache.googleapis.com:443") + assert client.transport._host == ( + "memcache.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://memcache.googleapis.com" + ) def test_cloud_memcache_client_get_transport_class(): transport = CloudMemcacheClient.get_transport_class() available_transports 
= [ transports.CloudMemcacheGrpcTransport, + transports.CloudMemcacheRestTransport, ] assert transport in available_transports @@ -201,6 +218,7 @@ def test_cloud_memcache_client_get_transport_class(): transports.CloudMemcacheGrpcAsyncIOTransport, "grpc_asyncio", ), + (CloudMemcacheClient, transports.CloudMemcacheRestTransport, "rest"), ], ) @mock.patch.object( @@ -346,6 +364,8 @@ def test_cloud_memcache_client_client_options( "grpc_asyncio", "false", ), + (CloudMemcacheClient, transports.CloudMemcacheRestTransport, "rest", "true"), + (CloudMemcacheClient, transports.CloudMemcacheRestTransport, "rest", "false"), ], ) @mock.patch.object( @@ -545,6 +565,7 @@ def test_cloud_memcache_client_get_mtls_endpoint_and_cert_source(client_class): transports.CloudMemcacheGrpcAsyncIOTransport, "grpc_asyncio", ), + (CloudMemcacheClient, transports.CloudMemcacheRestTransport, "rest"), ], ) def test_cloud_memcache_client_client_options_scopes( @@ -585,6 +606,7 @@ def test_cloud_memcache_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + (CloudMemcacheClient, transports.CloudMemcacheRestTransport, "rest", None), ], ) def test_cloud_memcache_client_client_options_credentials_file( @@ -3125,255 +3147,3026 @@ async def test_reschedule_maintenance_flattened_error_async(): ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.CloudMemcacheGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.ListInstancesRequest, + dict, + ], +) +def test_list_instances_rest(request_type): + client = CloudMemcacheClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.CloudMemcacheGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudMemcacheClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # It is an error to provide an api_key and a transport instance. - transport = transports.CloudMemcacheGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CloudMemcacheClient( - client_options=options, - transport=transport, + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_memcache.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CloudMemcacheClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_memcache.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) - # It is an error to provide scopes and a transport instance. 
- transport = transports.CloudMemcacheGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudMemcacheClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_instances(request) + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.CloudMemcacheGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = CloudMemcacheClient(transport=transport) - assert client.transport is transport +def test_list_instances_rest_required_fields( + request_type=cloud_memcache.ListInstancesRequest, +): + transport_class = transports.CloudMemcacheRestTransport -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.CloudMemcacheGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - channel = transport.grpc_channel - assert channel - transport = transports.CloudMemcacheGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudMemcacheGrpcTransport, - transports.CloudMemcacheGrpcAsyncIOTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - ], -) -def test_transport_kind(transport_name): - transport = CloudMemcacheClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) ) - assert transport.kind == transport_name + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. client = CloudMemcacheClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_memcache.ListInstancesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloud_memcache.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_instances(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_instances_rest_unset_required_fields(): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - assert isinstance( - client.transport, - transports.CloudMemcacheGrpcTransport, + + unset_fields = transport.list_instances._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) ) -def test_cloud_memcache_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.CloudMemcacheTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_instances_rest_interceptors(null_interceptor): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudMemcacheRestInterceptor(), + ) + client = CloudMemcacheClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as 
transcode, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_list_instances" + ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "pre_list_instances" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_memcache.ListInstancesRequest.pb( + cloud_memcache.ListInstancesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_memcache.ListInstancesResponse.to_json( + cloud_memcache.ListInstancesResponse() ) + request = cloud_memcache.ListInstancesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_memcache.ListInstancesResponse() -def test_cloud_memcache_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.CloudMemcacheTransport( - credentials=ga_credentials.AnonymousCredentials(), + client.list_instances( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - "list_instances", - "get_instance", - "create_instance", - "update_instance", - "update_parameters", - "delete_instance", - "apply_parameters", - "apply_software_update", - "reschedule_maintenance", - "get_location", - "list_locations", - "get_operation", - "cancel_operation", - "delete_operation", - "list_operations", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) + pre.assert_called_once() + post.assert_called_once() - with pytest.raises(NotImplementedError): - transport.close() - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client +def test_list_instances_rest_bad_request( + transport: str = "rest", request_type=cloud_memcache.ListInstancesRequest +): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_instances(request) -def test_cloud_memcache_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudMemcacheTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) +def test_list_instances_rest_flattened(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) -def test_cloud_memcache_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudMemcacheTransport() - adc.assert_called_once() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloud_memcache.ListInstancesResponse() + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} -def test_cloud_memcache_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - CloudMemcacheClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", ) + mock_args.update(sample_request) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_memcache.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudMemcacheGrpcTransport, - transports.CloudMemcacheGrpcAsyncIOTransport, - ], -) -def test_cloud_memcache_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", + client.list_instances(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta2/{parent=projects/*/locations/*}/instances" + % client.transport._host, + args[1], ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudMemcacheGrpcTransport, - transports.CloudMemcacheGrpcAsyncIOTransport, - ], -) -def test_cloud_memcache_transport_auth_gdch_credentials(transport_class): - host = "https://language.com" - api_audience_tests = [None, "https://language2.com"] - api_audience_expect = [host, "https://language2.com"] +def test_list_instances_rest_flattened_error(transport: str = "rest"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_instances( + cloud_memcache.ListInstancesRequest(), + parent="parent_value", + ) + + +def test_list_instances_rest_pager(transport: str = "rest"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_memcache.ListInstancesResponse( + resources=[ + cloud_memcache.Instance(), + cloud_memcache.Instance(), + cloud_memcache.Instance(), + ], + next_page_token="abc", + ), + cloud_memcache.ListInstancesResponse( + resources=[], + next_page_token="def", + ), + cloud_memcache.ListInstancesResponse( + resources=[ + cloud_memcache.Instance(), + ], + next_page_token="ghi", + ), + cloud_memcache.ListInstancesResponse( + resources=[ + cloud_memcache.Instance(), + cloud_memcache.Instance(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + cloud_memcache.ListInstancesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_instances(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_memcache.Instance) for i in results) + + pages = list(client.list_instances(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.GetInstanceRequest, + dict, + ], +) +def test_get_instance_rest(request_type): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a 
response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_memcache.Instance( + name="name_value", + display_name="display_name_value", + authorized_network="authorized_network_value", + zones=["zones_value"], + node_count=1070, + memcache_version=cloud_memcache.MemcacheVersion.MEMCACHE_1_5, + state=cloud_memcache.Instance.State.CREATING, + memcache_full_version="memcache_full_version_value", + discovery_endpoint="discovery_endpoint_value", + update_available=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_memcache.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_instance(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_memcache.Instance) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.authorized_network == "authorized_network_value" + assert response.zones == ["zones_value"] + assert response.node_count == 1070 + assert response.memcache_version == cloud_memcache.MemcacheVersion.MEMCACHE_1_5 + assert response.state == cloud_memcache.Instance.State.CREATING + assert response.memcache_full_version == "memcache_full_version_value" + assert response.discovery_endpoint == "discovery_endpoint_value" + assert response.update_available is True + + +def test_get_instance_rest_required_fields( + request_type=cloud_memcache.GetInstanceRequest, +): + transport_class = transports.CloudMemcacheRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = cloud_memcache.Instance() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloud_memcache.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_instance_rest_unset_required_fields(): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_instance_rest_interceptors(null_interceptor): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudMemcacheRestInterceptor(), + ) + client = CloudMemcacheClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, 
"transcode" + ) as transcode, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_get_instance" + ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "pre_get_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_memcache.GetInstanceRequest.pb( + cloud_memcache.GetInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_memcache.Instance.to_json( + cloud_memcache.Instance() + ) + + request = cloud_memcache.GetInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_memcache.Instance() + + client.get_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_instance_rest_bad_request( + transport: str = "rest", request_type=cloud_memcache.GetInstanceRequest +): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_instance(request) + + +def test_get_instance_rest_flattened(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_memcache.Instance() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_memcache.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta2/{name=projects/*/locations/*/instances/*}" + % client.transport._host, + args[1], + ) + + +def test_get_instance_rest_flattened_error(transport: str = "rest"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_instance( + cloud_memcache.GetInstanceRequest(), + name="name_value", + ) + + +def test_get_instance_rest_error(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.CreateInstanceRequest, + dict, + ], +) +def test_create_instance_rest(request_type): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["resource"] = { + "name": "name_value", + "display_name": "display_name_value", + "labels": {}, + "authorized_network": "authorized_network_value", + "zones": ["zones_value1", "zones_value2"], + "node_count": 1070, + "node_config": {"cpu_count": 976, "memory_size_mb": 1505}, + "memcache_version": 1, + "parameters": {"id": "id_value", "params": {}}, + "memcache_nodes": [ + { + "node_id": "node_id_value", + "zone": "zone_value", + "state": 1, + "host": "host_value", + "port": 453, + "parameters": {}, + "update_available": True, + } + ], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "state": 1, + "memcache_full_version": "memcache_full_version_value", + "instance_messages": [{"code": 1, "message": "message_value"}], + "discovery_endpoint": "discovery_endpoint_value", + "update_available": True, + "maintenance_policy": { + "create_time": {}, + "update_time": {}, + "description": "description_value", + "weekly_maintenance_window": [ + { + "day": 1, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "duration": {"seconds": 751, "nanos": 543}, + } + ], + }, + "maintenance_schedule": { + "start_time": {}, + "end_time": {}, + "schedule_deadline_time": {}, + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method 
and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_instance(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_instance_rest_required_fields( + request_type=cloud_memcache.CreateInstanceRequest, +): + transport_class = transports.CloudMemcacheRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["instance_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "instanceId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == request_init["instance_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["instanceId"] = "instance_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("instance_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == "instance_id_value" + + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_instance(request) + + expected_params = [ + ( + "instanceId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_instance_rest_unset_required_fields(): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_instance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("instanceId",)) + & set( + ( + "parent", + "instanceId", + "resource", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_instance_rest_interceptors(null_interceptor): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudMemcacheRestInterceptor(), + ) + client = CloudMemcacheClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_create_instance" + ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "pre_create_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_memcache.CreateInstanceRequest.pb( + 
cloud_memcache.CreateInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloud_memcache.CreateInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_instance_rest_bad_request( + transport: str = "rest", request_type=cloud_memcache.CreateInstanceRequest +): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["resource"] = { + "name": "name_value", + "display_name": "display_name_value", + "labels": {}, + "authorized_network": "authorized_network_value", + "zones": ["zones_value1", "zones_value2"], + "node_count": 1070, + "node_config": {"cpu_count": 976, "memory_size_mb": 1505}, + "memcache_version": 1, + "parameters": {"id": "id_value", "params": {}}, + "memcache_nodes": [ + { + "node_id": "node_id_value", + "zone": "zone_value", + "state": 1, + "host": "host_value", + "port": 453, + "parameters": {}, + "update_available": True, + } + ], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "state": 1, + "memcache_full_version": "memcache_full_version_value", + "instance_messages": [{"code": 1, "message": "message_value"}], + "discovery_endpoint": "discovery_endpoint_value", + "update_available": True, + "maintenance_policy": { + "create_time": {}, + 
"update_time": {}, + "description": "description_value", + "weekly_maintenance_window": [ + { + "day": 1, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "duration": {"seconds": 751, "nanos": 543}, + } + ], + }, + "maintenance_schedule": { + "start_time": {}, + "end_time": {}, + "schedule_deadline_time": {}, + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_instance(request) + + +def test_create_instance_rest_flattened(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + instance_id="instance_id_value", + resource=cloud_memcache.Instance(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta2/{parent=projects/*/locations/*}/instances" + % client.transport._host, + args[1], + ) + + +def test_create_instance_rest_flattened_error(transport: str = "rest"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_instance( + cloud_memcache.CreateInstanceRequest(), + parent="parent_value", + instance_id="instance_id_value", + resource=cloud_memcache.Instance(name="name_value"), + ) + + +def test_create_instance_rest_error(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.UpdateInstanceRequest, + dict, + ], +) +def test_update_instance_rest(request_type): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "resource": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } + request_init["resource"] = { + "name": "projects/sample1/locations/sample2/instances/sample3", + "display_name": "display_name_value", + "labels": {}, + "authorized_network": "authorized_network_value", + "zones": ["zones_value1", "zones_value2"], + "node_count": 1070, + "node_config": {"cpu_count": 976, "memory_size_mb": 1505}, + "memcache_version": 1, + "parameters": {"id": "id_value", "params": {}}, + "memcache_nodes": [ + { + "node_id": "node_id_value", + "zone": "zone_value", + "state": 1, + "host": "host_value", + "port": 453, + "parameters": {}, + "update_available": True, + } + ], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "state": 1, + "memcache_full_version": 
"memcache_full_version_value", + "instance_messages": [{"code": 1, "message": "message_value"}], + "discovery_endpoint": "discovery_endpoint_value", + "update_available": True, + "maintenance_policy": { + "create_time": {}, + "update_time": {}, + "description": "description_value", + "weekly_maintenance_window": [ + { + "day": 1, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "duration": {"seconds": 751, "nanos": 543}, + } + ], + }, + "maintenance_schedule": { + "start_time": {}, + "end_time": {}, + "schedule_deadline_time": {}, + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_instance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_update_instance_rest_required_fields( + request_type=cloud_memcache.UpdateInstanceRequest, +): + transport_class = transports.CloudMemcacheRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_instance_rest_unset_required_fields(): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_instance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "updateMask", + "resource", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_instance_rest_interceptors(null_interceptor): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudMemcacheRestInterceptor(), + ) + client = CloudMemcacheClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_update_instance" + ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "pre_update_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_memcache.UpdateInstanceRequest.pb( + cloud_memcache.UpdateInstanceRequest() + ) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloud_memcache.UpdateInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_instance_rest_bad_request( + transport: str = "rest", request_type=cloud_memcache.UpdateInstanceRequest +): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "resource": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } + request_init["resource"] = { + "name": "projects/sample1/locations/sample2/instances/sample3", + "display_name": "display_name_value", + "labels": {}, + "authorized_network": "authorized_network_value", + "zones": ["zones_value1", "zones_value2"], + "node_count": 1070, + "node_config": {"cpu_count": 976, "memory_size_mb": 1505}, + "memcache_version": 1, + "parameters": {"id": "id_value", "params": {}}, + "memcache_nodes": [ + { + "node_id": "node_id_value", + "zone": "zone_value", + "state": 1, + "host": "host_value", + "port": 453, + "parameters": {}, + "update_available": True, + } + ], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "state": 1, + "memcache_full_version": "memcache_full_version_value", + "instance_messages": [{"code": 1, "message": "message_value"}], + "discovery_endpoint": "discovery_endpoint_value", + "update_available": True, + "maintenance_policy": { + "create_time": {}, + 
"update_time": {}, + "description": "description_value", + "weekly_maintenance_window": [ + { + "day": 1, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "duration": {"seconds": 751, "nanos": 543}, + } + ], + }, + "maintenance_schedule": { + "start_time": {}, + "end_time": {}, + "schedule_deadline_time": {}, + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_instance(request) + + +def test_update_instance_rest_flattened(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "resource": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + resource=cloud_memcache.Instance(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta2/{resource.name=projects/*/locations/*/instances/*}" + % client.transport._host, + args[1], + ) + + +def test_update_instance_rest_flattened_error(transport: str = "rest"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_instance( + cloud_memcache.UpdateInstanceRequest(), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + resource=cloud_memcache.Instance(name="name_value"), + ) + + +def test_update_instance_rest_error(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.UpdateParametersRequest, + dict, + ], +) +def test_update_parameters_rest(request_type): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_parameters(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_update_parameters_rest_required_fields( + request_type=cloud_memcache.UpdateParametersRequest, +): + transport_class = transports.CloudMemcacheRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_parameters._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_parameters._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_parameters(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_parameters_rest_unset_required_fields(): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_parameters._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_parameters_rest_interceptors(null_interceptor): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudMemcacheRestInterceptor(), + ) + client = CloudMemcacheClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_update_parameters" + ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "pre_update_parameters" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_memcache.UpdateParametersRequest.pb( + cloud_memcache.UpdateParametersRequest() + ) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloud_memcache.UpdateParametersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_parameters( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_parameters_rest_bad_request( + transport: str = "rest", request_type=cloud_memcache.UpdateParametersRequest +): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_parameters(request) + + +def test_update_parameters_rest_flattened(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parameters=cloud_memcache.MemcacheParameters(id="id_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_parameters(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta2/{name=projects/*/locations/*/instances/*}:updateParameters" + % client.transport._host, + args[1], + ) + + +def test_update_parameters_rest_flattened_error(transport: str = "rest"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_parameters( + cloud_memcache.UpdateParametersRequest(), + name="name_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parameters=cloud_memcache.MemcacheParameters(id="id_value"), + ) + + +def test_update_parameters_rest_error(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.DeleteInstanceRequest, + dict, + ], +) +def test_delete_instance_rest(request_type): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_instance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_instance_rest_required_fields( + request_type=cloud_memcache.DeleteInstanceRequest, +): + transport_class = transports.CloudMemcacheRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_instance_rest_unset_required_fields(): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_instance_rest_interceptors(null_interceptor): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudMemcacheRestInterceptor(), + ) + client = CloudMemcacheClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_delete_instance" + ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "pre_delete_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_memcache.DeleteInstanceRequest.pb( + cloud_memcache.DeleteInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloud_memcache.DeleteInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_instance_rest_bad_request( + transport: str = "rest", request_type=cloud_memcache.DeleteInstanceRequest +): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_instance(request) + + +def test_delete_instance_rest_flattened(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta2/{name=projects/*/locations/*/instances/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_instance_rest_flattened_error(transport: str = "rest"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_instance( + cloud_memcache.DeleteInstanceRequest(), + name="name_value", + ) + + +def test_delete_instance_rest_error(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.ApplyParametersRequest, + dict, + ], +) +def test_apply_parameters_rest(request_type): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.apply_parameters(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_apply_parameters_rest_required_fields( + request_type=cloud_memcache.ApplyParametersRequest, +): + transport_class = transports.CloudMemcacheRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).apply_parameters._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).apply_parameters._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.apply_parameters(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_apply_parameters_rest_unset_required_fields(): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.apply_parameters._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_apply_parameters_rest_interceptors(null_interceptor): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudMemcacheRestInterceptor(), + ) + client = CloudMemcacheClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_apply_parameters" + ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "pre_apply_parameters" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_memcache.ApplyParametersRequest.pb( + cloud_memcache.ApplyParametersRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloud_memcache.ApplyParametersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.apply_parameters( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_apply_parameters_rest_bad_request( + transport: str = "rest", request_type=cloud_memcache.ApplyParametersRequest +): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.apply_parameters(request) + + +def test_apply_parameters_rest_flattened(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + node_ids=["node_ids_value"], + apply_all=True, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.apply_parameters(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta2/{name=projects/*/locations/*/instances/*}:applyParameters" + % client.transport._host, + args[1], + ) + + +def test_apply_parameters_rest_flattened_error(transport: str = "rest"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.apply_parameters( + cloud_memcache.ApplyParametersRequest(), + name="name_value", + node_ids=["node_ids_value"], + apply_all=True, + ) + + +def test_apply_parameters_rest_error(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.ApplySoftwareUpdateRequest, + dict, + ], +) +def test_apply_software_update_rest(request_type): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"instance": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.apply_software_update(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_apply_software_update_rest_required_fields( + request_type=cloud_memcache.ApplySoftwareUpdateRequest, +): + transport_class = transports.CloudMemcacheRestTransport + + request_init = {} + request_init["instance"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).apply_software_update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = "instance_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).apply_software_update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == "instance_value" + + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.apply_software_update(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_apply_software_update_rest_unset_required_fields(): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.apply_software_update._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("instance",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_apply_software_update_rest_interceptors(null_interceptor): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudMemcacheRestInterceptor(), + ) + client = CloudMemcacheClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_apply_software_update" + ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "pre_apply_software_update" + ) as pre: + 
pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_memcache.ApplySoftwareUpdateRequest.pb( + cloud_memcache.ApplySoftwareUpdateRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloud_memcache.ApplySoftwareUpdateRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.apply_software_update( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_apply_software_update_rest_bad_request( + transport: str = "rest", request_type=cloud_memcache.ApplySoftwareUpdateRequest +): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"instance": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.apply_software_update(request) + + +def test_apply_software_update_rest_flattened(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "instance": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + instance="instance_value", + node_ids=["node_ids_value"], + apply_all=True, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.apply_software_update(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta2/{instance=projects/*/locations/*/instances/*}:applySoftwareUpdate" + % client.transport._host, + args[1], + ) + + +def test_apply_software_update_rest_flattened_error(transport: str = "rest"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.apply_software_update( + cloud_memcache.ApplySoftwareUpdateRequest(), + instance="instance_value", + node_ids=["node_ids_value"], + apply_all=True, + ) + + +def test_apply_software_update_rest_error(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_memcache.RescheduleMaintenanceRequest, + dict, + ], +) +def test_reschedule_maintenance_rest(request_type): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"instance": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.reschedule_maintenance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_reschedule_maintenance_rest_required_fields( + request_type=cloud_memcache.RescheduleMaintenanceRequest, +): + transport_class = transports.CloudMemcacheRestTransport + + request_init = {} + request_init["instance"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).reschedule_maintenance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = "instance_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).reschedule_maintenance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == "instance_value" + + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.reschedule_maintenance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_reschedule_maintenance_rest_unset_required_fields(): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.reschedule_maintenance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "instance", + "rescheduleType", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_reschedule_maintenance_rest_interceptors(null_interceptor): + transport = transports.CloudMemcacheRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudMemcacheRestInterceptor(), + ) + client = CloudMemcacheClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_reschedule_maintenance" + ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, 
"pre_reschedule_maintenance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_memcache.RescheduleMaintenanceRequest.pb( + cloud_memcache.RescheduleMaintenanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloud_memcache.RescheduleMaintenanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.reschedule_maintenance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_reschedule_maintenance_rest_bad_request( + transport: str = "rest", request_type=cloud_memcache.RescheduleMaintenanceRequest +): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"instance": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.reschedule_maintenance(request) + + +def test_reschedule_maintenance_rest_flattened(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "instance": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + instance="instance_value", + reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.reschedule_maintenance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta2/{instance=projects/*/locations/*/instances/*}:rescheduleMaintenance" + % client.transport._host, + args[1], + ) + + +def test_reschedule_maintenance_rest_flattened_error(transport: str = "rest"): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.reschedule_maintenance( + cloud_memcache.RescheduleMaintenanceRequest(), + instance="instance_value", + reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + +def test_reschedule_maintenance_rest_error(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CloudMemcacheGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CloudMemcacheGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudMemcacheClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.CloudMemcacheGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudMemcacheClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudMemcacheClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.CloudMemcacheGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudMemcacheClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudMemcacheGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CloudMemcacheClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudMemcacheGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CloudMemcacheGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudMemcacheGrpcTransport, + transports.CloudMemcacheGrpcAsyncIOTransport, + transports.CloudMemcacheRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = CloudMemcacheClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CloudMemcacheGrpcTransport, + ) + + +def test_cloud_memcache_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.CloudMemcacheTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_cloud_memcache_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.CloudMemcacheTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "list_instances", + "get_instance", + "create_instance", + "update_instance", + "update_parameters", + "delete_instance", + "apply_parameters", + "apply_software_update", + "reschedule_maintenance", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_cloud_memcache_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, 
"load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudMemcacheTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_cloud_memcache_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudMemcacheTransport() + adc.assert_called_once() + + +def test_cloud_memcache_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CloudMemcacheClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudMemcacheGrpcTransport, + transports.CloudMemcacheGrpcAsyncIOTransport, + ], +) +def test_cloud_memcache_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudMemcacheGrpcTransport, + transports.CloudMemcacheGrpcAsyncIOTransport, + transports.CloudMemcacheRestTransport, + ], +) +def test_cloud_memcache_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] for t, e in zip(api_audience_tests, api_audience_expect): with mock.patch.object(google.auth, "default", autospec=True) as adc: gdch_mock = mock.MagicMock() @@ -3465,11 +6258,40 @@ def test_cloud_memcache_grpc_transport_client_cert_source_for_mtls(transport_cla ) +def test_cloud_memcache_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.CloudMemcacheRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_cloud_memcache_rest_lro_client(): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_cloud_memcache_host_no_port(transport_name): @@ -3480,7 +6302,11 @@ def test_cloud_memcache_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("memcache.googleapis.com:443") + assert client.transport._host == ( + "memcache.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://memcache.googleapis.com" + ) @pytest.mark.parametrize( @@ -3488,6 +6314,7 @@ def test_cloud_memcache_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_cloud_memcache_host_with_port(transport_name): @@ -3498,7 +6325,57 @@ def test_cloud_memcache_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("memcache.googleapis.com:8000") + assert client.transport._host == ( + "memcache.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://memcache.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_cloud_memcache_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = CloudMemcacheClient( + credentials=creds1, + transport=transport_name, + ) + client2 = CloudMemcacheClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_instances._session + session2 = client2.transport.list_instances._session + assert session1 != session2 + session1 = client1.transport.get_instance._session + session2 = client2.transport.get_instance._session + assert session1 != session2 + session1 = client1.transport.create_instance._session + session2 = client2.transport.create_instance._session + assert session1 != session2 + session1 = client1.transport.update_instance._session + session2 
= client2.transport.update_instance._session + assert session1 != session2 + session1 = client1.transport.update_parameters._session + session2 = client2.transport.update_parameters._session + assert session1 != session2 + session1 = client1.transport.delete_instance._session + session2 = client2.transport.delete_instance._session + assert session1 != session2 + session1 = client1.transport.apply_parameters._session + session2 = client2.transport.apply_parameters._session + assert session1 != session2 + session1 = client1.transport.apply_software_update._session + session2 = client2.transport.apply_software_update._session + assert session1 != session2 + session1 = client1.transport.reschedule_maintenance._session + session2 = client2.transport.reschedule_maintenance._session + assert session1 != session2 def test_cloud_memcache_grpc_transport_channel(): @@ -3825,6 +6702,352 @@ async def test_transport_close_async(): close.assert_called_once() +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = CloudMemcacheClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + def test_delete_operation(transport: str = "grpc"): client = CloudMemcacheClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4681,6 +7904,7 @@ async def test_get_location_from_dict_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -4698,6 +7922,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: From 4c01eed637c41010947c982d40f5ac1f9965b503 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 16 Feb 2023 20:35:25 +0000 Subject: [PATCH 151/159] chore(main): release 1.7.0 (#246) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- CHANGELOG.md | 7 +++++++ google/cloud/memcache/gapic_version.py | 2 +- google/cloud/memcache_v1/gapic_version.py | 2 +- google/cloud/memcache_v1beta2/gapic_version.py | 2 +- .../snippet_metadata_google.cloud.memcache.v1.json | 2 +- .../snippet_metadata_google.cloud.memcache.v1beta2.json | 2 +- 7 files changed, 13 insertions(+), 6 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 093be7e..64e0684 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "1.6.1" + ".": "1.7.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index e5d22cd..5973e7e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.7.0](https://github.com/googleapis/python-memcache/compare/v1.6.1...v1.7.0) (2023-02-16) + + +### Features + +* Enable "rest" transport in Python for services supporting numeric enums ([#245](https://github.com/googleapis/python-memcache/issues/245)) ([f702f7a](https://github.com/googleapis/python-memcache/commit/f702f7a08d7a31689e400969d23e4a1d8637dd41)) + ## 
[1.6.1](https://github.com/googleapis/python-memcache/compare/v1.6.0...v1.6.1) (2023-01-20) diff --git a/google/cloud/memcache/gapic_version.py b/google/cloud/memcache/gapic_version.py index b4028ab..f033c61 100644 --- a/google/cloud/memcache/gapic_version.py +++ b/google/cloud/memcache/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.6.1" # {x-release-please-version} +__version__ = "1.7.0" # {x-release-please-version} diff --git a/google/cloud/memcache_v1/gapic_version.py b/google/cloud/memcache_v1/gapic_version.py index b4028ab..f033c61 100644 --- a/google/cloud/memcache_v1/gapic_version.py +++ b/google/cloud/memcache_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.6.1" # {x-release-please-version} +__version__ = "1.7.0" # {x-release-please-version} diff --git a/google/cloud/memcache_v1beta2/gapic_version.py b/google/cloud/memcache_v1beta2/gapic_version.py index b4028ab..f033c61 100644 --- a/google/cloud/memcache_v1beta2/gapic_version.py +++ b/google/cloud/memcache_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.6.1" # {x-release-please-version} +__version__ = "1.7.0" # {x-release-please-version} diff --git a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json index 4fbe6f0..e3783a9 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-memcache", - "version": "0.1.0" + "version": "1.7.0" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json index a19a59a..1a25abd 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-memcache", - "version": "0.1.0" + "version": "1.7.0" }, "snippets": [ { From 3005310fb11330890d9e72ffbe4b7905557e229a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 27 Feb 2023 16:18:46 +0000 Subject: [PATCH 152/159] chore(python): upgrade gcp-releasetool in .kokoro [autoapprove] (#249) Source-Link: https://togithub.com/googleapis/synthtool/commit/5f2a6089f73abf06238fe4310f6a14d6f6d1eed3 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:8555f0e37e6261408f792bfd6635102d2da5ad73f8f09bcb24f25e6afb5fac97 --- .github/.OwlBot.lock.yaml | 2 +- .kokoro/requirements.in | 2 +- .kokoro/requirements.txt | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 894fb6b..5fc5daa 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f62c53736eccb0c4934a3ea9316e0d57696bb49c1a7c86c726e9bb8a2f87dadf + digest: sha256:8555f0e37e6261408f792bfd6635102d2da5ad73f8f09bcb24f25e6afb5fac97 diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in index cbd7e77..882178c 100644 --- a/.kokoro/requirements.in +++ b/.kokoro/requirements.in @@ -1,5 +1,5 @@ gcp-docuploader -gcp-releasetool +gcp-releasetool>=1.10.5 # required for compatibility with cryptography>=39.x importlib-metadata typing-extensions twine diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 096e480..fa99c12 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -154,9 +154,9 @@ gcp-docuploader==0.6.4 \ --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf # via -r requirements.in -gcp-releasetool==1.10.0 \ - --hash=sha256:72a38ca91b59c24f7e699e9227c90cbe4dd71b789383cb0164b088abae294c83 \ - --hash=sha256:8c7c99320208383d4bb2b808c6880eb7a81424afe7cdba3c8d84b25f4f0e097d +gcp-releasetool==1.10.5 \ + --hash=sha256:174b7b102d704b254f2a26a3eda2c684fd3543320ec239baf771542a2e58e109 \ + --hash=sha256:e29d29927fe2ca493105a82958c6873bb2b90d503acac56be2c229e74de0eec9 # via -r requirements.in google-api-core==2.10.2 \ --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ From 41a9ec35004db48e33870237547a287add75675d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 27 Feb 2023 12:40:37 -0500 Subject: [PATCH 153/159] chore: Update gapic-generator-python to v1.8.5 (#248) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.8.5 PiperOrigin-RevId: 511892190 Source-Link: https://github.com/googleapis/googleapis/commit/a45d9c09c1287ffdf938f4e8083e791046c0b23b 
Source-Link: https://github.com/googleapis/googleapis-gen/commit/1907294b1d8365ea24f8c5f2e059a64124c4ed3b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTkwNzI5NGIxZDgzNjVlYTI0ZjhjNWYyZTA1OWE2NDEyNGM0ZWQzYiJ9 * 🩉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../cloud_memcache/transports/rest.py | 46 ++++++++---------- .../cloud/memcache_v1/types/cloud_memcache.py | 2 + .../cloud_memcache/transports/rest.py | 48 +++++++++---------- .../memcache_v1beta2/types/cloud_memcache.py | 2 + ...pet_metadata_google.cloud.memcache.v1.json | 2 +- ...etadata_google.cloud.memcache.v1beta2.json | 2 +- 6 files changed, 49 insertions(+), 53 deletions(-) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/rest.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/rest.py index 3456e6f..736c109 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/rest.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/rest.py @@ -17,7 +17,7 @@ import dataclasses import json # type: ignore import re -from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings from google.api_core import ( @@ -331,7 +331,7 @@ def pre_get_location( self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, str]], - ) -> locations_pb2.Location: + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_location Override in a subclass to manipulate the request or metadata @@ -340,7 +340,7 @@ def pre_get_location( return request, metadata def post_get_location( - self, response: locations_pb2.GetLocationRequest + self, response: locations_pb2.Location ) -> locations_pb2.Location: """Post-rpc interceptor for get_location 
@@ -354,7 +354,7 @@ def pre_list_locations( self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, str]], - ) -> locations_pb2.ListLocationsResponse: + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_locations Override in a subclass to manipulate the request or metadata @@ -363,7 +363,7 @@ def pre_list_locations( return request, metadata def post_list_locations( - self, response: locations_pb2.ListLocationsRequest + self, response: locations_pb2.ListLocationsResponse ) -> locations_pb2.ListLocationsResponse: """Post-rpc interceptor for list_locations @@ -377,7 +377,7 @@ def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]], - ) -> None: + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for cancel_operation Override in a subclass to manipulate the request or metadata @@ -385,9 +385,7 @@ def pre_cancel_operation( """ return request, metadata - def post_cancel_operation( - self, response: operations_pb2.CancelOperationRequest - ) -> None: + def post_cancel_operation(self, response: None) -> None: """Post-rpc interceptor for cancel_operation Override in a subclass to manipulate the response @@ -400,7 +398,7 @@ def pre_delete_operation( self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]], - ) -> None: + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for delete_operation Override in a subclass to manipulate the request or metadata @@ -408,9 +406,7 @@ def pre_delete_operation( """ return request, metadata - def post_delete_operation( - self, response: operations_pb2.DeleteOperationRequest - ) -> None: + def post_delete_operation(self, response: None) -> None: """Post-rpc interceptor for delete_operation Override in a subclass to manipulate the response @@ -423,7 +419,7 @@ def 
pre_get_operation( self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]], - ) -> operations_pb2.Operation: + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ -432,7 +428,7 @@ def pre_get_operation( return request, metadata def post_get_operation( - self, response: operations_pb2.GetOperationRequest + self, response: operations_pb2.Operation ) -> operations_pb2.Operation: """Post-rpc interceptor for get_operation @@ -446,7 +442,7 @@ def pre_list_operations( self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]], - ) -> operations_pb2.ListOperationsResponse: + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_operations Override in a subclass to manipulate the request or metadata @@ -455,7 +451,7 @@ def pre_list_operations( return request, metadata def post_list_operations( - self, response: operations_pb2.ListOperationsRequest + self, response: operations_pb2.ListOperationsResponse ) -> operations_pb2.ListOperationsResponse: """Post-rpc interceptor for list_operations @@ -638,7 +634,7 @@ class _ApplyParameters(CloudMemcacheRestStub): def __hash__(self): return hash("ApplyParameters") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -738,7 +734,7 @@ class _CreateInstance(CloudMemcacheRestStub): def __hash__(self): return hash("CreateInstance") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "instanceId": "", } @@ -838,7 +834,7 @@ class _DeleteInstance(CloudMemcacheRestStub): def __hash__(self): return hash("DeleteInstance") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, 
Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -927,7 +923,7 @@ class _GetInstance(CloudMemcacheRestStub): def __hash__(self): return hash("GetInstance") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -1015,7 +1011,7 @@ class _ListInstances(CloudMemcacheRestStub): def __hash__(self): return hash("ListInstances") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -1105,7 +1101,7 @@ class _RescheduleMaintenance(CloudMemcacheRestStub): def __hash__(self): return hash("RescheduleMaintenance") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -1205,7 +1201,7 @@ class _UpdateInstance(CloudMemcacheRestStub): def __hash__(self): return hash("UpdateInstance") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "updateMask": {}, } @@ -1305,7 +1301,7 @@ class _UpdateParameters(CloudMemcacheRestStub): def __hash__(self): return hash("UpdateParameters") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): diff --git a/google/cloud/memcache_v1/types/cloud_memcache.py b/google/cloud/memcache_v1/types/cloud_memcache.py index cc2fd53..a01dc01 100644 --- a/google/cloud/memcache_v1/types/cloud_memcache.py +++ b/google/cloud/memcache_v1/types/cloud_memcache.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from __future__ import annotations + from typing import MutableMapping, MutableSequence from google.protobuf import duration_pb2 # type: ignore diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/rest.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/rest.py index d5c5170..a314179 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/rest.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/rest.py @@ -17,7 +17,7 @@ import dataclasses import json # type: ignore import re -from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings from google.api_core import ( @@ -362,7 +362,7 @@ def pre_get_location( self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, str]], - ) -> locations_pb2.Location: + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_location Override in a subclass to manipulate the request or metadata @@ -371,7 +371,7 @@ def pre_get_location( return request, metadata def post_get_location( - self, response: locations_pb2.GetLocationRequest + self, response: locations_pb2.Location ) -> locations_pb2.Location: """Post-rpc interceptor for get_location @@ -385,7 +385,7 @@ def pre_list_locations( self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, str]], - ) -> locations_pb2.ListLocationsResponse: + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_locations Override in a subclass to manipulate the request or metadata @@ -394,7 +394,7 @@ def pre_list_locations( return request, metadata def post_list_locations( - self, response: locations_pb2.ListLocationsRequest + self, response: locations_pb2.ListLocationsResponse ) -> locations_pb2.ListLocationsResponse: """Post-rpc interceptor for list_locations 
@@ -408,7 +408,7 @@ def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]], - ) -> None: + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for cancel_operation Override in a subclass to manipulate the request or metadata @@ -416,9 +416,7 @@ def pre_cancel_operation( """ return request, metadata - def post_cancel_operation( - self, response: operations_pb2.CancelOperationRequest - ) -> None: + def post_cancel_operation(self, response: None) -> None: """Post-rpc interceptor for cancel_operation Override in a subclass to manipulate the response @@ -431,7 +429,7 @@ def pre_delete_operation( self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]], - ) -> None: + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for delete_operation Override in a subclass to manipulate the request or metadata @@ -439,9 +437,7 @@ def pre_delete_operation( """ return request, metadata - def post_delete_operation( - self, response: operations_pb2.DeleteOperationRequest - ) -> None: + def post_delete_operation(self, response: None) -> None: """Post-rpc interceptor for delete_operation Override in a subclass to manipulate the response @@ -454,7 +450,7 @@ def pre_get_operation( self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]], - ) -> operations_pb2.Operation: + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ -463,7 +459,7 @@ def pre_get_operation( return request, metadata def post_get_operation( - self, response: operations_pb2.GetOperationRequest + self, response: operations_pb2.Operation ) -> operations_pb2.Operation: """Post-rpc interceptor for get_operation @@ -477,7 +473,7 @@ def pre_list_operations( self, request: 
operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]], - ) -> operations_pb2.ListOperationsResponse: + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_operations Override in a subclass to manipulate the request or metadata @@ -486,7 +482,7 @@ def pre_list_operations( return request, metadata def post_list_operations( - self, response: operations_pb2.ListOperationsRequest + self, response: operations_pb2.ListOperationsResponse ) -> operations_pb2.ListOperationsResponse: """Post-rpc interceptor for list_operations @@ -669,7 +665,7 @@ class _ApplyParameters(CloudMemcacheRestStub): def __hash__(self): return hash("ApplyParameters") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -769,7 +765,7 @@ class _ApplySoftwareUpdate(CloudMemcacheRestStub): def __hash__(self): return hash("ApplySoftwareUpdate") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -869,7 +865,7 @@ class _CreateInstance(CloudMemcacheRestStub): def __hash__(self): return hash("CreateInstance") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "instanceId": "", } @@ -969,7 +965,7 @@ class _DeleteInstance(CloudMemcacheRestStub): def __hash__(self): return hash("DeleteInstance") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -1058,7 +1054,7 @@ class _GetInstance(CloudMemcacheRestStub): def __hash__(self): return hash("GetInstance") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def 
_get_unset_required_fields(cls, message_dict): @@ -1146,7 +1142,7 @@ class _ListInstances(CloudMemcacheRestStub): def __hash__(self): return hash("ListInstances") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -1236,7 +1232,7 @@ class _RescheduleMaintenance(CloudMemcacheRestStub): def __hash__(self): return hash("RescheduleMaintenance") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -1336,7 +1332,7 @@ class _UpdateInstance(CloudMemcacheRestStub): def __hash__(self): return hash("UpdateInstance") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "updateMask": {}, } @@ -1436,7 +1432,7 @@ class _UpdateParameters(CloudMemcacheRestStub): def __hash__(self): return hash("UpdateParameters") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): diff --git a/google/cloud/memcache_v1beta2/types/cloud_memcache.py b/google/cloud/memcache_v1beta2/types/cloud_memcache.py index 13cb54e..e786238 100644 --- a/google/cloud/memcache_v1beta2/types/cloud_memcache.py +++ b/google/cloud/memcache_v1beta2/types/cloud_memcache.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from __future__ import annotations + from typing import MutableMapping, MutableSequence from google.protobuf import duration_pb2 # type: ignore diff --git a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json index e3783a9..4fbe6f0 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-memcache", - "version": "1.7.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json index 1a25abd..a19a59a 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-memcache", - "version": "1.7.0" + "version": "0.1.0" }, "snippets": [ { From 2f17f2573edacb5cf621f8e18da56b92edbceb33 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 16 Mar 2023 11:26:12 +0000 Subject: [PATCH 154/159] chore(deps): Update nox in .kokoro/requirements.in [autoapprove] (#250) Source-Link: https://togithub.com/googleapis/synthtool/commit/92006bb3cdc84677aa93c7f5235424ec2b157146 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:2e247c7bf5154df7f98cce087a20ca7605e236340c7d6d1a14447e5c06791bd6 --- .github/.OwlBot.lock.yaml | 2 +- .kokoro/requirements.in | 2 +- .kokoro/requirements.txt | 14 +++++--------- 3 files changed, 7 insertions(+), 11 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 5fc5daa..b8edda5 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ 
-13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:8555f0e37e6261408f792bfd6635102d2da5ad73f8f09bcb24f25e6afb5fac97 + digest: sha256:2e247c7bf5154df7f98cce087a20ca7605e236340c7d6d1a14447e5c06791bd6 diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in index 882178c..ec867d9 100644 --- a/.kokoro/requirements.in +++ b/.kokoro/requirements.in @@ -5,6 +5,6 @@ typing-extensions twine wheel setuptools -nox +nox>=2022.11.21 # required to remove dependency on py charset-normalizer<3 click<8.1.0 diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index fa99c12..66a2172 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -1,6 +1,6 @@ # -# This file is autogenerated by pip-compile with python 3.10 -# To update, run: +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: # # pip-compile --allow-unsafe --generate-hashes requirements.in # @@ -335,9 +335,9 @@ more-itertools==9.0.0 \ --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab # via jaraco-classes -nox==2022.8.7 \ - --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ - --hash=sha256:96cca88779e08282a699d672258ec01eb7c792d35bbbf538c723172bce23212c +nox==2022.11.21 \ + --hash=sha256:0e41a990e290e274cb205a976c4c97ee3c5234441a8132c8c3fd9ea3c22149eb \ + --hash=sha256:e21c31de0711d1274ca585a2c5fde36b1aa962005ba8e9322bf5eeed16dcd684 # via -r requirements.in packaging==21.3 \ --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ @@ -380,10 +380,6 @@ protobuf==3.20.3 \ # gcp-docuploader # gcp-releasetool # google-api-core -py==1.11.0 \ - --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ - --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 - # via nox 
pyasn1==0.4.8 \ --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba From dac4ef673c0ff54178ee4e204e64c516cbdf392a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 23 Mar 2023 09:32:29 -0400 Subject: [PATCH 155/159] docs: Fix formatting of request arg in docstring (#251) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: Fix formatting of request arg in docstring chore: Update gapic-generator-python to v1.9.1 PiperOrigin-RevId: 518604533 Source-Link: https://github.com/googleapis/googleapis/commit/8a085aeddfa010af5bcef090827aac5255383d7e Source-Link: https://github.com/googleapis/googleapis-gen/commit/b2ab4b0a0ae2907e812c209198a74e0898afcb04 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjJhYjRiMGEwYWUyOTA3ZTgxMmMyMDkxOThhNzRlMDg5OGFmY2IwNCJ9 * 🩉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../services/cloud_memcache/transports/rest.py | 8 -------- .../services/cloud_memcache/transports/rest.py | 9 --------- 2 files changed, 17 deletions(-) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/rest.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/rest.py index 736c109..1a79678 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/rest.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/rest.py @@ -658,7 +658,6 @@ def __call__( request (~.cloud_memcache.ApplyParametersRequest): The request object. Request for [ApplyParameters][google.cloud.memcache.v1.CloudMemcache.ApplyParameters]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -760,7 +759,6 @@ def __call__( request (~.cloud_memcache.CreateInstanceRequest): The request object. Request for [CreateInstance][google.cloud.memcache.v1.CloudMemcache.CreateInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -858,7 +856,6 @@ def __call__( request (~.cloud_memcache.DeleteInstanceRequest): The request object. Request for [DeleteInstance][google.cloud.memcache.v1.CloudMemcache.DeleteInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -947,7 +944,6 @@ def __call__( request (~.cloud_memcache.GetInstanceRequest): The request object. Request for [GetInstance][google.cloud.memcache.v1.CloudMemcache.GetInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1035,7 +1031,6 @@ def __call__( request (~.cloud_memcache.ListInstancesRequest): The request object. Request for [ListInstances][google.cloud.memcache.v1.CloudMemcache.ListInstances]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1125,7 +1120,6 @@ def __call__( request (~.cloud_memcache.RescheduleMaintenanceRequest): The request object. Request for [RescheduleMaintenance][google.cloud.memcache.v1.CloudMemcache.RescheduleMaintenance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1227,7 +1221,6 @@ def __call__( request (~.cloud_memcache.UpdateInstanceRequest): The request object. Request for [UpdateInstance][google.cloud.memcache.v1.CloudMemcache.UpdateInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -1325,7 +1318,6 @@ def __call__( request (~.cloud_memcache.UpdateParametersRequest): The request object. Request for [UpdateParameters][google.cloud.memcache.v1.CloudMemcache.UpdateParameters]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/rest.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/rest.py index a314179..fc45c4d 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/rest.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/rest.py @@ -689,7 +689,6 @@ def __call__( request (~.cloud_memcache.ApplyParametersRequest): The request object. Request for [ApplyParameters][google.cloud.memcache.v1beta2.CloudMemcache.ApplyParameters]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -789,7 +788,6 @@ def __call__( request (~.cloud_memcache.ApplySoftwareUpdateRequest): The request object. Request for [ApplySoftwareUpdate][google.cloud.memcache.v1beta2.CloudMemcache.ApplySoftwareUpdate]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -891,7 +889,6 @@ def __call__( request (~.cloud_memcache.CreateInstanceRequest): The request object. Request for [CreateInstance][google.cloud.memcache.v1beta2.CloudMemcache.CreateInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -989,7 +986,6 @@ def __call__( request (~.cloud_memcache.DeleteInstanceRequest): The request object. Request for [DeleteInstance][google.cloud.memcache.v1beta2.CloudMemcache.DeleteInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. @@ -1078,7 +1074,6 @@ def __call__( request (~.cloud_memcache.GetInstanceRequest): The request object. Request for [GetInstance][google.cloud.memcache.v1beta2.CloudMemcache.GetInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1166,7 +1161,6 @@ def __call__( request (~.cloud_memcache.ListInstancesRequest): The request object. Request for [ListInstances][google.cloud.memcache.v1beta2.CloudMemcache.ListInstances]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1256,7 +1250,6 @@ def __call__( request (~.cloud_memcache.RescheduleMaintenanceRequest): The request object. Request for [RescheduleMaintenance][google.cloud.memcache.v1beta2.CloudMemcache.RescheduleMaintenance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1358,7 +1351,6 @@ def __call__( request (~.cloud_memcache.UpdateInstanceRequest): The request object. Request for [UpdateInstance][google.cloud.memcache.v1beta2.CloudMemcache.UpdateInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1456,7 +1448,6 @@ def __call__( request (~.cloud_memcache.UpdateParametersRequest): The request object. Request for [UpdateParameters][google.cloud.memcache.v1beta2.CloudMemcache.UpdateParameters]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
From 52f02474ddabf3080d3ec87b7749f1b13b264856 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 27 Mar 2023 10:48:22 -0400 Subject: [PATCH 156/159] chore(main): release 1.7.1 (#252) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- CHANGELOG.md | 7 +++++++ google/cloud/memcache/gapic_version.py | 2 +- google/cloud/memcache_v1/gapic_version.py | 2 +- google/cloud/memcache_v1beta2/gapic_version.py | 2 +- .../snippet_metadata_google.cloud.memcache.v1.json | 2 +- .../snippet_metadata_google.cloud.memcache.v1beta2.json | 2 +- 7 files changed, 13 insertions(+), 6 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 64e0684..d0972da 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "1.7.0" + ".": "1.7.1" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 5973e7e..ec00ed3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.7.1](https://github.com/googleapis/python-memcache/compare/v1.7.0...v1.7.1) (2023-03-23) + + +### Documentation + +* Fix formatting of request arg in docstring ([#251](https://github.com/googleapis/python-memcache/issues/251)) ([dac4ef6](https://github.com/googleapis/python-memcache/commit/dac4ef673c0ff54178ee4e204e64c516cbdf392a)) + ## [1.7.0](https://github.com/googleapis/python-memcache/compare/v1.6.1...v1.7.0) (2023-02-16) diff --git a/google/cloud/memcache/gapic_version.py b/google/cloud/memcache/gapic_version.py index f033c61..84856f0 100644 --- a/google/cloud/memcache/gapic_version.py +++ b/google/cloud/memcache/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.7.0" # {x-release-please-version} +__version__ = "1.7.1" # {x-release-please-version} diff --git a/google/cloud/memcache_v1/gapic_version.py b/google/cloud/memcache_v1/gapic_version.py index f033c61..84856f0 100644 --- a/google/cloud/memcache_v1/gapic_version.py +++ b/google/cloud/memcache_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.7.0" # {x-release-please-version} +__version__ = "1.7.1" # {x-release-please-version} diff --git a/google/cloud/memcache_v1beta2/gapic_version.py b/google/cloud/memcache_v1beta2/gapic_version.py index f033c61..84856f0 100644 --- a/google/cloud/memcache_v1beta2/gapic_version.py +++ b/google/cloud/memcache_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.7.0" # {x-release-please-version} +__version__ = "1.7.1" # {x-release-please-version} diff --git a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json index 4fbe6f0..87d2277 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-memcache", - "version": "0.1.0" + "version": "1.7.1" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json index a19a59a..6152ae0 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-memcache", - "version": "0.1.0" + "version": "1.7.1" 
}, "snippets": [ { From 0b6c98ee6f9de2e61a866cfba975cee21cb51ce0 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 25 May 2023 16:22:19 +0000 Subject: [PATCH 157/159] build(deps): bump requests to 2.31.0 [autoapprove] (#254) Source-Link: https://togithub.com/googleapis/synthtool/commit/30bd01b4ab78bf1b2a425816e15b3e7e090993dd Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:9bc5fa3b62b091f60614c08a7fb4fd1d3e1678e326f34dd66ce1eefb5dc3267b --- .github/.OwlBot.lock.yaml | 3 ++- .kokoro/requirements.txt | 6 +++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index b8edda5..32b3c48 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2e247c7bf5154df7f98cce087a20ca7605e236340c7d6d1a14447e5c06791bd6 + digest: sha256:9bc5fa3b62b091f60614c08a7fb4fd1d3e1678e326f34dd66ce1eefb5dc3267b +# created: 2023-05-25T14:56:16.294623272Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 66a2172..3b8d7ee 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -419,9 +419,9 @@ readme-renderer==37.3 \ --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 # via twine -requests==2.28.1 \ - --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ - --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349 +requests==2.31.0 \ + --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ + --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 # via # gcp-releasetool # google-api-core From 695935631223a73ea872d433ca189d91d5fa4cc0 Mon Sep 17 
00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 3 Jun 2023 22:00:41 +0000 Subject: [PATCH 158/159] build(deps): bump cryptography to 41.0.0 [autoapprove] (#256) Source-Link: https://togithub.com/googleapis/synthtool/commit/d0f51a0c2a9a6bcca86911eabea9e484baadf64b Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:240b5bcc2bafd450912d2da2be15e62bc6de2cf839823ae4bf94d4f392b451dc --- .github/.OwlBot.lock.yaml | 4 ++-- .kokoro/requirements.txt | 42 +++++++++++++++++++-------------------- 2 files changed, 22 insertions(+), 24 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 32b3c48..02a4ded 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:9bc5fa3b62b091f60614c08a7fb4fd1d3e1678e326f34dd66ce1eefb5dc3267b -# created: 2023-05-25T14:56:16.294623272Z + digest: sha256:240b5bcc2bafd450912d2da2be15e62bc6de2cf839823ae4bf94d4f392b451dc +# created: 2023-06-03T21:25:37.968717478Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 3b8d7ee..c7929db 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -113,28 +113,26 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==39.0.1 \ - --hash=sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4 \ - --hash=sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f \ - --hash=sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502 \ - --hash=sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41 \ - --hash=sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965 \ - 
--hash=sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e \ - --hash=sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc \ - --hash=sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad \ - --hash=sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505 \ - --hash=sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388 \ - --hash=sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6 \ - --hash=sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2 \ - --hash=sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac \ - --hash=sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695 \ - --hash=sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6 \ - --hash=sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336 \ - --hash=sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0 \ - --hash=sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c \ - --hash=sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106 \ - --hash=sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a \ - --hash=sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8 +cryptography==41.0.0 \ + --hash=sha256:0ddaee209d1cf1f180f1efa338a68c4621154de0afaef92b89486f5f96047c55 \ + --hash=sha256:14754bcdae909d66ff24b7b5f166d69340ccc6cb15731670435efd5719294895 \ + --hash=sha256:344c6de9f8bda3c425b3a41b319522ba3208551b70c2ae00099c205f0d9fd3be \ + --hash=sha256:34d405ea69a8b34566ba3dfb0521379b210ea5d560fafedf9f800a9a94a41928 \ + --hash=sha256:3680248309d340fda9611498a5319b0193a8dbdb73586a1acf8109d06f25b92d \ + --hash=sha256:3c5ef25d060c80d6d9f7f9892e1d41bb1c79b78ce74805b8cb4aa373cb7d5ec8 \ + --hash=sha256:4ab14d567f7bbe7f1cdff1c53d5324ed4d3fc8bd17c481b395db224fb405c237 \ + 
--hash=sha256:5c1f7293c31ebc72163a9a0df246f890d65f66b4a40d9ec80081969ba8c78cc9 \ + --hash=sha256:6b71f64beeea341c9b4f963b48ee3b62d62d57ba93eb120e1196b31dc1025e78 \ + --hash=sha256:7d92f0248d38faa411d17f4107fc0bce0c42cae0b0ba5415505df72d751bf62d \ + --hash=sha256:8362565b3835ceacf4dc8f3b56471a2289cf51ac80946f9087e66dc283a810e0 \ + --hash=sha256:84a165379cb9d411d58ed739e4af3396e544eac190805a54ba2e0322feb55c46 \ + --hash=sha256:88ff107f211ea696455ea8d911389f6d2b276aabf3231bf72c8853d22db755c5 \ + --hash=sha256:9f65e842cb02550fac96536edb1d17f24c0a338fd84eaf582be25926e993dde4 \ + --hash=sha256:a4fc68d1c5b951cfb72dfd54702afdbbf0fb7acdc9b7dc4301bbf2225a27714d \ + --hash=sha256:b7f2f5c525a642cecad24ee8670443ba27ac1fab81bba4cc24c7b6b41f2d0c75 \ + --hash=sha256:b846d59a8d5a9ba87e2c3d757ca019fa576793e8758174d3868aecb88d6fc8eb \ + --hash=sha256:bf8fc66012ca857d62f6a347007e166ed59c0bc150cefa49f28376ebe7d992a2 \ + --hash=sha256:f5d0bf9b252f30a31664b6f64432b4730bb7038339bd18b1fafe129cfc2be9be # via # gcp-releasetool # secretstorage From 3bd4e8e063c56bd398a986c9c149aaa128283094 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 6 Jun 2023 06:44:27 -0400 Subject: [PATCH 159/159] build: update README to indicate that source has moved and delete all files (#258) --- .coveragerc | 13 - .eggs/README.txt | 6 - .flake8 | 33 - .github/.OwlBot.lock.yaml | 17 - .github/.OwlBot.yaml | 26 - .github/CODEOWNERS | 12 - .github/CONTRIBUTING.md | 28 - .github/ISSUE_TEMPLATE/bug_report.md | 43 - .github/ISSUE_TEMPLATE/feature_request.md | 18 - .github/ISSUE_TEMPLATE/support_request.md | 7 - .github/PULL_REQUEST_TEMPLATE.md | 7 - .github/auto-approve.yml | 3 - .github/auto-label.yaml | 15 - .github/header-checker-lint.yml | 15 - .github/release-please.yml | 9 - .github/release-trigger.yml | 1 - .github/snippet-bot.yml | 0 .github/workflows/docs.yml | 38 - .github/workflows/lint.yml | 25 - .github/workflows/unittest.yml | 57 - .gitignore | 63 - .kokoro/build.sh | 59 - 
.kokoro/continuous/common.cfg | 27 - .kokoro/continuous/continuous.cfg | 1 - .kokoro/continuous/prerelease-deps.cfg | 7 - .kokoro/docker/docs/Dockerfile | 83 - .kokoro/docs/common.cfg | 66 - .kokoro/docs/docs-presubmit.cfg | 28 - .kokoro/docs/docs.cfg | 1 - .kokoro/populate-secrets.sh | 43 - .kokoro/presubmit/common.cfg | 27 - .kokoro/presubmit/prerelease-deps.cfg | 7 - .kokoro/presubmit/presubmit.cfg | 1 - .kokoro/publish-docs.sh | 62 - .kokoro/release.sh | 29 - .kokoro/release/common.cfg | 40 - .kokoro/release/release.cfg | 1 - .kokoro/requirements.in | 10 - .kokoro/requirements.txt | 492 - .kokoro/samples/lint/common.cfg | 34 - .kokoro/samples/lint/continuous.cfg | 6 - .kokoro/samples/lint/periodic.cfg | 6 - .kokoro/samples/lint/presubmit.cfg | 6 - .kokoro/samples/python3.10/common.cfg | 40 - .kokoro/samples/python3.10/continuous.cfg | 6 - .kokoro/samples/python3.10/periodic-head.cfg | 11 - .kokoro/samples/python3.10/periodic.cfg | 6 - .kokoro/samples/python3.10/presubmit.cfg | 6 - .kokoro/samples/python3.11/common.cfg | 40 - .kokoro/samples/python3.11/continuous.cfg | 6 - .kokoro/samples/python3.11/periodic-head.cfg | 11 - .kokoro/samples/python3.11/periodic.cfg | 6 - .kokoro/samples/python3.11/presubmit.cfg | 6 - .kokoro/samples/python3.7/common.cfg | 40 - .kokoro/samples/python3.7/continuous.cfg | 6 - .kokoro/samples/python3.7/periodic-head.cfg | 11 - .kokoro/samples/python3.7/periodic.cfg | 6 - .kokoro/samples/python3.7/presubmit.cfg | 6 - .kokoro/samples/python3.8/common.cfg | 40 - .kokoro/samples/python3.8/continuous.cfg | 6 - .kokoro/samples/python3.8/periodic-head.cfg | 11 - .kokoro/samples/python3.8/periodic.cfg | 6 - .kokoro/samples/python3.8/presubmit.cfg | 6 - .kokoro/samples/python3.9/common.cfg | 40 - .kokoro/samples/python3.9/continuous.cfg | 6 - .kokoro/samples/python3.9/periodic-head.cfg | 11 - .kokoro/samples/python3.9/periodic.cfg | 6 - .kokoro/samples/python3.9/presubmit.cfg | 6 - .kokoro/test-samples-against-head.sh | 26 - 
.kokoro/test-samples-impl.sh | 102 - .kokoro/test-samples.sh | 44 - .kokoro/trampoline.sh | 28 - .kokoro/trampoline_v2.sh | 487 - .pre-commit-config.yaml | 31 - .release-please-manifest.json | 3 - .trampolinerc | 63 - CHANGELOG.md | 242 - CODE_OF_CONDUCT.md | 95 - CONTRIBUTING.rst | 281 - MANIFEST.in | 25 - README.rst | 5 + docs/README.rst | 1 - docs/_static/custom.css | 20 - docs/_templates/layout.html | 50 - docs/changelog.md | 1 - docs/conf.py | 384 - docs/index.rst | 34 - docs/memcache_v1/cloud_memcache.rst | 10 - docs/memcache_v1/services.rst | 6 - docs/memcache_v1/types.rst | 6 - docs/memcache_v1beta2/cloud_memcache.rst | 10 - docs/memcache_v1beta2/services.rst | 6 - docs/memcache_v1beta2/types.rst | 6 - docs/multiprocessing.rst | 7 - google/cloud/memcache/__init__.py | 67 - google/cloud/memcache/gapic_version.py | 16 - google/cloud/memcache/py.typed | 2 - google/cloud/memcache_v1/__init__.py | 64 - google/cloud/memcache_v1/gapic_metadata.json | 148 - google/cloud/memcache_v1/gapic_version.py | 16 - google/cloud/memcache_v1/py.typed | 2 - google/cloud/memcache_v1/services/__init__.py | 15 - .../services/cloud_memcache/__init__.py | 22 - .../services/cloud_memcache/async_client.py | 1644 ---- .../services/cloud_memcache/client.py | 1878 ---- .../services/cloud_memcache/pagers.py | 155 - .../cloud_memcache/transports/__init__.py | 36 - .../cloud_memcache/transports/base.py | 317 - .../cloud_memcache/transports/grpc.py | 598 -- .../cloud_memcache/transports/grpc_asyncio.py | 613 -- .../cloud_memcache/transports/rest.py | 1865 ---- google/cloud/memcache_v1/types/__init__.py | 56 - .../cloud/memcache_v1/types/cloud_memcache.py | 922 -- google/cloud/memcache_v1beta2/__init__.py | 66 - .../memcache_v1beta2/gapic_metadata.json | 163 - .../cloud/memcache_v1beta2/gapic_version.py | 16 - google/cloud/memcache_v1beta2/py.typed | 2 - .../memcache_v1beta2/services/__init__.py | 15 - .../services/cloud_memcache/__init__.py | 22 - .../services/cloud_memcache/async_client.py 
| 1791 ---- .../services/cloud_memcache/client.py | 2025 ----- .../services/cloud_memcache/pagers.py | 155 - .../cloud_memcache/transports/__init__.py | 36 - .../cloud_memcache/transports/base.py | 331 - .../cloud_memcache/transports/grpc.py | 628 -- .../cloud_memcache/transports/grpc_asyncio.py | 643 -- .../cloud_memcache/transports/rest.py | 2005 ----- .../cloud/memcache_v1beta2/types/__init__.py | 58 - .../memcache_v1beta2/types/cloud_memcache.py | 973 -- memcache-v1beta2-py.tar.gz | 0 mypy.ini | 3 - noxfile.py | 426 - owlbot.py | 56 - release-please-config.json | 30 - renovate.json | 12 - ...d_cloud_memcache_apply_parameters_async.py | 56 - ...ed_cloud_memcache_apply_parameters_sync.py | 56 - ...ed_cloud_memcache_create_instance_async.py | 64 - ...ted_cloud_memcache_create_instance_sync.py | 64 - ...ed_cloud_memcache_delete_instance_async.py | 56 - ...ted_cloud_memcache_delete_instance_sync.py | 56 - ...rated_cloud_memcache_get_instance_async.py | 52 - ...erated_cloud_memcache_get_instance_sync.py | 52 - ...ted_cloud_memcache_list_instances_async.py | 53 - ...ated_cloud_memcache_list_instances_sync.py | 53 - ...d_memcache_reschedule_maintenance_async.py | 57 - ...ud_memcache_reschedule_maintenance_sync.py | 57 - ...ed_cloud_memcache_update_instance_async.py | 62 - ...ted_cloud_memcache_update_instance_sync.py | 62 - ..._cloud_memcache_update_parameters_async.py | 56 - ...d_cloud_memcache_update_parameters_sync.py | 56 - ...d_cloud_memcache_apply_parameters_async.py | 56 - ...ed_cloud_memcache_apply_parameters_sync.py | 56 - ...ud_memcache_apply_software_update_async.py | 56 - ...oud_memcache_apply_software_update_sync.py | 56 - ...ed_cloud_memcache_create_instance_async.py | 64 - ...ted_cloud_memcache_create_instance_sync.py | 64 - ...ed_cloud_memcache_delete_instance_async.py | 56 - ...ted_cloud_memcache_delete_instance_sync.py | 56 - ...rated_cloud_memcache_get_instance_async.py | 52 - ...erated_cloud_memcache_get_instance_sync.py | 52 - 
...ted_cloud_memcache_list_instances_async.py | 53 - ...ated_cloud_memcache_list_instances_sync.py | 53 - ...d_memcache_reschedule_maintenance_async.py | 57 - ...ud_memcache_reschedule_maintenance_sync.py | 57 - ...ed_cloud_memcache_update_instance_async.py | 62 - ...ted_cloud_memcache_update_instance_sync.py | 62 - ..._cloud_memcache_update_parameters_async.py | 56 - ...d_cloud_memcache_update_parameters_sync.py | 56 - ...pet_metadata_google.cloud.memcache.v1.json | 1375 --- ...etadata_google.cloud.memcache.v1beta2.json | 1552 ---- scripts/decrypt-secrets.sh | 46 - scripts/fixup_keywords.py | 183 - scripts/fixup_memcache_v1_keywords.py | 183 - scripts/fixup_memcache_v1beta2_keywords.py | 184 - scripts/readme-gen/readme_gen.py | 69 - scripts/readme-gen/templates/README.tmpl.rst | 87 - scripts/readme-gen/templates/auth.tmpl.rst | 9 - .../templates/auth_api_key.tmpl.rst | 14 - .../templates/install_deps.tmpl.rst | 29 - .../templates/install_portaudio.tmpl.rst | 35 - setup.cfg | 19 - setup.py | 90 - testing/.gitignore | 3 - testing/constraints-3.10.txt | 6 - testing/constraints-3.11.txt | 6 - testing/constraints-3.12.txt | 6 - testing/constraints-3.7.txt | 9 - testing/constraints-3.8.txt | 6 - testing/constraints-3.9.txt | 6 - tests/__init__.py | 15 - tests/unit/__init__.py | 15 - tests/unit/gapic/__init__.py | 15 - tests/unit/gapic/memcache_v1/__init__.py | 15 - .../gapic/memcache_v1/test_cloud_memcache.py | 7415 --------------- tests/unit/gapic/memcache_v1beta2/__init__.py | 15 - .../memcache_v1beta2/test_cloud_memcache.py | 7968 ----------------- 197 files changed, 5 insertions(+), 42988 deletions(-) delete mode 100644 .coveragerc delete mode 100644 .eggs/README.txt delete mode 100644 .flake8 delete mode 100644 .github/.OwlBot.lock.yaml delete mode 100644 .github/.OwlBot.yaml delete mode 100644 .github/CODEOWNERS delete mode 100644 .github/CONTRIBUTING.md delete mode 100644 .github/ISSUE_TEMPLATE/bug_report.md delete mode 100644 
.github/ISSUE_TEMPLATE/feature_request.md delete mode 100644 .github/ISSUE_TEMPLATE/support_request.md delete mode 100644 .github/PULL_REQUEST_TEMPLATE.md delete mode 100644 .github/auto-approve.yml delete mode 100644 .github/auto-label.yaml delete mode 100644 .github/header-checker-lint.yml delete mode 100644 .github/release-please.yml delete mode 100644 .github/release-trigger.yml delete mode 100644 .github/snippet-bot.yml delete mode 100644 .github/workflows/docs.yml delete mode 100644 .github/workflows/lint.yml delete mode 100644 .github/workflows/unittest.yml delete mode 100644 .gitignore delete mode 100755 .kokoro/build.sh delete mode 100644 .kokoro/continuous/common.cfg delete mode 100644 .kokoro/continuous/continuous.cfg delete mode 100644 .kokoro/continuous/prerelease-deps.cfg delete mode 100644 .kokoro/docker/docs/Dockerfile delete mode 100644 .kokoro/docs/common.cfg delete mode 100644 .kokoro/docs/docs-presubmit.cfg delete mode 100644 .kokoro/docs/docs.cfg delete mode 100755 .kokoro/populate-secrets.sh delete mode 100644 .kokoro/presubmit/common.cfg delete mode 100644 .kokoro/presubmit/prerelease-deps.cfg delete mode 100644 .kokoro/presubmit/presubmit.cfg delete mode 100755 .kokoro/publish-docs.sh delete mode 100755 .kokoro/release.sh delete mode 100644 .kokoro/release/common.cfg delete mode 100644 .kokoro/release/release.cfg delete mode 100644 .kokoro/requirements.in delete mode 100644 .kokoro/requirements.txt delete mode 100644 .kokoro/samples/lint/common.cfg delete mode 100644 .kokoro/samples/lint/continuous.cfg delete mode 100644 .kokoro/samples/lint/periodic.cfg delete mode 100644 .kokoro/samples/lint/presubmit.cfg delete mode 100644 .kokoro/samples/python3.10/common.cfg delete mode 100644 .kokoro/samples/python3.10/continuous.cfg delete mode 100644 .kokoro/samples/python3.10/periodic-head.cfg delete mode 100644 .kokoro/samples/python3.10/periodic.cfg delete mode 100644 .kokoro/samples/python3.10/presubmit.cfg delete mode 100644 
.kokoro/samples/python3.11/common.cfg delete mode 100644 .kokoro/samples/python3.11/continuous.cfg delete mode 100644 .kokoro/samples/python3.11/periodic-head.cfg delete mode 100644 .kokoro/samples/python3.11/periodic.cfg delete mode 100644 .kokoro/samples/python3.11/presubmit.cfg delete mode 100644 .kokoro/samples/python3.7/common.cfg delete mode 100644 .kokoro/samples/python3.7/continuous.cfg delete mode 100644 .kokoro/samples/python3.7/periodic-head.cfg delete mode 100644 .kokoro/samples/python3.7/periodic.cfg delete mode 100644 .kokoro/samples/python3.7/presubmit.cfg delete mode 100644 .kokoro/samples/python3.8/common.cfg delete mode 100644 .kokoro/samples/python3.8/continuous.cfg delete mode 100644 .kokoro/samples/python3.8/periodic-head.cfg delete mode 100644 .kokoro/samples/python3.8/periodic.cfg delete mode 100644 .kokoro/samples/python3.8/presubmit.cfg delete mode 100644 .kokoro/samples/python3.9/common.cfg delete mode 100644 .kokoro/samples/python3.9/continuous.cfg delete mode 100644 .kokoro/samples/python3.9/periodic-head.cfg delete mode 100644 .kokoro/samples/python3.9/periodic.cfg delete mode 100644 .kokoro/samples/python3.9/presubmit.cfg delete mode 100755 .kokoro/test-samples-against-head.sh delete mode 100755 .kokoro/test-samples-impl.sh delete mode 100755 .kokoro/test-samples.sh delete mode 100755 .kokoro/trampoline.sh delete mode 100755 .kokoro/trampoline_v2.sh delete mode 100644 .pre-commit-config.yaml delete mode 100644 .release-please-manifest.json delete mode 100644 .trampolinerc delete mode 100644 CHANGELOG.md delete mode 100644 CODE_OF_CONDUCT.md delete mode 100644 CONTRIBUTING.rst delete mode 100644 MANIFEST.in delete mode 120000 docs/README.rst delete mode 100644 docs/_static/custom.css delete mode 100644 docs/_templates/layout.html delete mode 120000 docs/changelog.md delete mode 100644 docs/conf.py delete mode 100644 docs/index.rst delete mode 100644 docs/memcache_v1/cloud_memcache.rst delete mode 100644 docs/memcache_v1/services.rst 
delete mode 100644 docs/memcache_v1/types.rst delete mode 100644 docs/memcache_v1beta2/cloud_memcache.rst delete mode 100644 docs/memcache_v1beta2/services.rst delete mode 100644 docs/memcache_v1beta2/types.rst delete mode 100644 docs/multiprocessing.rst delete mode 100644 google/cloud/memcache/__init__.py delete mode 100644 google/cloud/memcache/gapic_version.py delete mode 100644 google/cloud/memcache/py.typed delete mode 100644 google/cloud/memcache_v1/__init__.py delete mode 100644 google/cloud/memcache_v1/gapic_metadata.json delete mode 100644 google/cloud/memcache_v1/gapic_version.py delete mode 100644 google/cloud/memcache_v1/py.typed delete mode 100644 google/cloud/memcache_v1/services/__init__.py delete mode 100644 google/cloud/memcache_v1/services/cloud_memcache/__init__.py delete mode 100644 google/cloud/memcache_v1/services/cloud_memcache/async_client.py delete mode 100644 google/cloud/memcache_v1/services/cloud_memcache/client.py delete mode 100644 google/cloud/memcache_v1/services/cloud_memcache/pagers.py delete mode 100644 google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py delete mode 100644 google/cloud/memcache_v1/services/cloud_memcache/transports/base.py delete mode 100644 google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py delete mode 100644 google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py delete mode 100644 google/cloud/memcache_v1/services/cloud_memcache/transports/rest.py delete mode 100644 google/cloud/memcache_v1/types/__init__.py delete mode 100644 google/cloud/memcache_v1/types/cloud_memcache.py delete mode 100644 google/cloud/memcache_v1beta2/__init__.py delete mode 100644 google/cloud/memcache_v1beta2/gapic_metadata.json delete mode 100644 google/cloud/memcache_v1beta2/gapic_version.py delete mode 100644 google/cloud/memcache_v1beta2/py.typed delete mode 100644 google/cloud/memcache_v1beta2/services/__init__.py delete mode 100644 
google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py delete mode 100644 google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py delete mode 100644 google/cloud/memcache_v1beta2/services/cloud_memcache/client.py delete mode 100644 google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py delete mode 100644 google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py delete mode 100644 google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py delete mode 100644 google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py delete mode 100644 google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py delete mode 100644 google/cloud/memcache_v1beta2/services/cloud_memcache/transports/rest.py delete mode 100644 google/cloud/memcache_v1beta2/types/__init__.py delete mode 100644 google/cloud/memcache_v1beta2/types/cloud_memcache.py delete mode 100644 memcache-v1beta2-py.tar.gz delete mode 100644 mypy.ini delete mode 100644 noxfile.py delete mode 100644 owlbot.py delete mode 100644 release-please-config.json delete mode 100644 renovate.json delete mode 100644 samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_async.py delete mode 100644 samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_sync.py delete mode 100644 samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_async.py delete mode 100644 samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_sync.py delete mode 100644 samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_async.py delete mode 100644 samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_sync.py delete mode 100644 samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_async.py delete mode 100644 samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_sync.py delete 
mode 100644 samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_async.py delete mode 100644 samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_sync.py delete mode 100644 samples/generated_samples/memcache_v1_generated_cloud_memcache_reschedule_maintenance_async.py delete mode 100644 samples/generated_samples/memcache_v1_generated_cloud_memcache_reschedule_maintenance_sync.py delete mode 100644 samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_async.py delete mode 100644 samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_sync.py delete mode 100644 samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_async.py delete mode 100644 samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_sync.py delete mode 100644 samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_async.py delete mode 100644 samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_sync.py delete mode 100644 samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_async.py delete mode 100644 samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_sync.py delete mode 100644 samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_async.py delete mode 100644 samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_sync.py delete mode 100644 samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_async.py delete mode 100644 samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_sync.py delete mode 100644 samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_async.py delete mode 100644 samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_sync.py delete mode 
100644 samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_async.py delete mode 100644 samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_sync.py delete mode 100644 samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_async.py delete mode 100644 samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_sync.py delete mode 100644 samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_async.py delete mode 100644 samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_sync.py delete mode 100644 samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_async.py delete mode 100644 samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_sync.py delete mode 100644 samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json delete mode 100644 samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json delete mode 100755 scripts/decrypt-secrets.sh delete mode 100644 scripts/fixup_keywords.py delete mode 100644 scripts/fixup_memcache_v1_keywords.py delete mode 100644 scripts/fixup_memcache_v1beta2_keywords.py delete mode 100644 scripts/readme-gen/readme_gen.py delete mode 100644 scripts/readme-gen/templates/README.tmpl.rst delete mode 100644 scripts/readme-gen/templates/auth.tmpl.rst delete mode 100644 scripts/readme-gen/templates/auth_api_key.tmpl.rst delete mode 100644 scripts/readme-gen/templates/install_deps.tmpl.rst delete mode 100644 scripts/readme-gen/templates/install_portaudio.tmpl.rst delete mode 100644 setup.cfg delete mode 100644 setup.py delete mode 100644 testing/.gitignore delete mode 100644 testing/constraints-3.10.txt delete mode 100644 testing/constraints-3.11.txt delete mode 100644 testing/constraints-3.12.txt delete mode 100644 testing/constraints-3.7.txt delete mode 100644 
testing/constraints-3.8.txt delete mode 100644 testing/constraints-3.9.txt delete mode 100644 tests/__init__.py delete mode 100644 tests/unit/__init__.py delete mode 100644 tests/unit/gapic/__init__.py delete mode 100644 tests/unit/gapic/memcache_v1/__init__.py delete mode 100644 tests/unit/gapic/memcache_v1/test_cloud_memcache.py delete mode 100644 tests/unit/gapic/memcache_v1beta2/__init__.py delete mode 100644 tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index b16a38f..0000000 --- a/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/memcache/__init__.py - google/cloud/memcache/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/.eggs/README.txt b/.eggs/README.txt deleted file mode 100644 index 5d01668..0000000 --- a/.eggs/README.txt +++ /dev/null @@ -1,6 +0,0 @@ -This directory contains eggs that were downloaded by setuptools to build, test, and run plug-ins. - -This directory caches those eggs to prevent repeated downloads. - -However, it is safe to delete this directory. - diff --git a/.flake8 b/.flake8 deleted file mode 100644 index 2e43874..0000000 --- a/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. 
DO NOT EDIT! -[flake8] -ignore = E203, E231, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. - **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml deleted file mode 100644 index 02a4ded..0000000 --- a/.github/.OwlBot.lock.yaml +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -docker: - image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:240b5bcc2bafd450912d2da2be15e62bc6de2cf839823ae4bf94d4f392b451dc -# created: 2023-06-03T21:25:37.968717478Z diff --git a/.github/.OwlBot.yaml b/.github/.OwlBot.yaml deleted file mode 100644 index c431d29..0000000 --- a/.github/.OwlBot.yaml +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright 2021 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -docker: - image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - -deep-remove-regex: - - /owl-bot-staging - -deep-copy-regex: - - source: /google/cloud/memcache/(v.*)/.*-py/(.*) - dest: /owl-bot-staging/$1/$2 - -begin-after-commit-hash: 6a5da3f1274b088752f074da5bc9e30bd1beb27e - diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS deleted file mode 100644 index e446644..0000000 --- a/.github/CODEOWNERS +++ /dev/null @@ -1,12 +0,0 @@ -# Code owners file. -# This file controls who is tagged for review for any given pull request. -# -# For syntax help see: -# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax -# Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json. - -# @googleapis/yoshi-python is the default owner for changes in this repo -* @googleapis/yoshi-python - -# @googleapis/python-samples-reviewers is the default owner for samples changes -/samples/ @googleapis/python-samples-reviewers diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md deleted file mode 100644 index 939e534..0000000 --- a/.github/CONTRIBUTING.md +++ /dev/null @@ -1,28 +0,0 @@ -# How to Contribute - -We'd love to accept your patches and contributions to this project. There are -just a few small guidelines you need to follow. - -## Contributor License Agreement - -Contributions to this project must be accompanied by a Contributor License -Agreement. You (or your employer) retain the copyright to your contribution; -this simply gives us permission to use and redistribute your contributions as -part of the project. Head over to to see -your current agreements on file or to sign a new one. - -You generally only need to submit a CLA once, so if you've already submitted one -(even if it was for a different project), you probably don't need to do it -again. - -## Code reviews - -All submissions, including submissions by project members, require review. 
We -use GitHub pull requests for this purpose. Consult -[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more -information on using pull requests. - -## Community Guidelines - -This project follows [Google's Open Source Community -Guidelines](https://opensource.google.com/conduct/). diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index 3f5c313..0000000 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,43 +0,0 @@ ---- -name: Bug report -about: Create a report to help us improve - ---- - -Thanks for stopping by to let us know something could be better! - -**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. - -Please run down the following list and make sure you've tried the usual "quick fixes": - - - Search the issues already opened: https://github.com/googleapis/python-memcache/issues - - Search StackOverflow: https://stackoverflow.com/questions/tagged/google-cloud-platform+python - -If you are still having issues, please be sure to include as much information as possible: - -#### Environment details - - - OS type and version: - - Python version: `python --version` - - pip version: `pip --version` - - `google-cloud-memcache` version: `pip show google-cloud-memcache` - -#### Steps to reproduce - - 1. ? - 2. ? - -#### Code example - -```python -# example -``` - -#### Stack trace -``` -# example -``` - -Making sure to follow these steps will guarantee the quickest resolution possible. - -Thanks! 
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index 6365857..0000000 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,18 +0,0 @@ ---- -name: Feature request -about: Suggest an idea for this library - ---- - -Thanks for stopping by to let us know something could be better! - -**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. - - **Is your feature request related to a problem? Please describe.** -A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] - **Describe the solution you'd like** -A clear and concise description of what you want to happen. - **Describe alternatives you've considered** -A clear and concise description of any alternative solutions or features you've considered. - **Additional context** -Add any other context or screenshots about the feature request here. diff --git a/.github/ISSUE_TEMPLATE/support_request.md b/.github/ISSUE_TEMPLATE/support_request.md deleted file mode 100644 index 9958690..0000000 --- a/.github/ISSUE_TEMPLATE/support_request.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -name: Support request -about: If you have a support contract with Google, please create an issue in the Google Cloud Support console. - ---- - -**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md deleted file mode 100644 index 882022b..0000000 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ /dev/null @@ -1,7 +0,0 @@ -Thank you for opening a Pull Request! 
Before submitting your PR, there are a few things you can do to make sure it goes smoothly: -- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-memcache/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea -- [ ] Ensure the tests and linter pass -- [ ] Code coverage does not decrease (if any source code was changed) -- [ ] Appropriate docs were updated (if necessary) - -Fixes # 🩕 diff --git a/.github/auto-approve.yml b/.github/auto-approve.yml deleted file mode 100644 index 311ebbb..0000000 --- a/.github/auto-approve.yml +++ /dev/null @@ -1,3 +0,0 @@ -# https://github.com/googleapis/repo-automation-bots/tree/main/packages/auto-approve -processes: - - "OwlBotTemplateChanges" diff --git a/.github/auto-label.yaml b/.github/auto-label.yaml deleted file mode 100644 index 41bff0b..0000000 --- a/.github/auto-label.yaml +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-requestsize: - enabled: true diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml deleted file mode 100644 index 6fe78aa..0000000 --- a/.github/header-checker-lint.yml +++ /dev/null @@ -1,15 +0,0 @@ -{"allowedCopyrightHolders": ["Google LLC"], - "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], - "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"], - "sourceFileExtensions": [ - "ts", - "js", - "java", - "sh", - "Dockerfile", - "yaml", - "py", - "html", - "txt" - ] -} \ No newline at end of file diff --git a/.github/release-please.yml b/.github/release-please.yml deleted file mode 100644 index e9a4f00..0000000 --- a/.github/release-please.yml +++ /dev/null @@ -1,9 +0,0 @@ -releaseType: python -handleGHRelease: true -manifest: true -# NOTE: this section is generated by synthtool.languages.python -# See https://github.com/googleapis/synthtool/blob/master/synthtool/languages/python.py -branches: -- branch: v0 - handleGHRelease: true - releaseType: python diff --git a/.github/release-trigger.yml b/.github/release-trigger.yml deleted file mode 100644 index d4ca941..0000000 --- a/.github/release-trigger.yml +++ /dev/null @@ -1 +0,0 @@ -enabled: true diff --git a/.github/snippet-bot.yml b/.github/snippet-bot.yml deleted file mode 100644 index e69de29..0000000 diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml deleted file mode 100644 index e97d89e..0000000 --- a/.github/workflows/docs.yml +++ /dev/null @@ -1,38 +0,0 @@ -on: - pull_request: - branches: - - main -name: docs -jobs: - docs: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v3 - - name: Setup Python - uses: actions/setup-python@v4 - with: - python-version: "3.9" - - name: Install nox - run: | - python -m pip install --upgrade setuptools pip wheel - python -m pip install nox - - name: Run docs - run: | - nox -s docs - docfx: - runs-on: 
ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v3 - - name: Setup Python - uses: actions/setup-python@v4 - with: - python-version: "3.9" - - name: Install nox - run: | - python -m pip install --upgrade setuptools pip wheel - python -m pip install nox - - name: Run docfx - run: | - nox -s docfx diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml deleted file mode 100644 index 16d5a9e..0000000 --- a/.github/workflows/lint.yml +++ /dev/null @@ -1,25 +0,0 @@ -on: - pull_request: - branches: - - main -name: lint -jobs: - lint: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v3 - - name: Setup Python - uses: actions/setup-python@v4 - with: - python-version: "3.8" - - name: Install nox - run: | - python -m pip install --upgrade setuptools pip wheel - python -m pip install nox - - name: Run lint - run: | - nox -s lint - - name: Run lint_setup_py - run: | - nox -s lint_setup_py diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml deleted file mode 100644 index 8057a76..0000000 --- a/.github/workflows/unittest.yml +++ /dev/null @@ -1,57 +0,0 @@ -on: - pull_request: - branches: - - main -name: unittest -jobs: - unit: - runs-on: ubuntu-latest - strategy: - matrix: - python: ['3.7', '3.8', '3.9', '3.10', '3.11'] - steps: - - name: Checkout - uses: actions/checkout@v3 - - name: Setup Python - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python }} - - name: Install nox - run: | - python -m pip install --upgrade setuptools pip wheel - python -m pip install nox - - name: Run unit tests - env: - COVERAGE_FILE: .coverage-${{ matrix.python }} - run: | - nox -s unit-${{ matrix.python }} - - name: Upload coverage results - uses: actions/upload-artifact@v3 - with: - name: coverage-artifacts - path: .coverage-${{ matrix.python }} - - cover: - runs-on: ubuntu-latest - needs: - - unit - steps: - - name: Checkout - uses: actions/checkout@v3 - - name: Setup Python - uses: 
actions/setup-python@v4 - with: - python-version: "3.8" - - name: Install coverage - run: | - python -m pip install --upgrade setuptools pip wheel - python -m pip install coverage - - name: Download coverage results - uses: actions/download-artifact@v3 - with: - name: coverage-artifacts - path: .coverage-results/ - - name: Report coverage results - run: | - coverage combine .coverage-results/.coverage* - coverage report --show-missing --fail-under=100 diff --git a/.gitignore b/.gitignore deleted file mode 100644 index b4243ce..0000000 --- a/.gitignore +++ /dev/null @@ -1,63 +0,0 @@ -*.py[cod] -*.sw[op] - -# C extensions -*.so - -# Packages -*.egg -*.egg-info -dist -build -eggs -.eggs -parts -bin -var -sdist -develop-eggs -.installed.cfg -lib -lib64 -__pycache__ - -# Installer logs -pip-log.txt - -# Unit test / coverage reports -.coverage -.nox -.cache -.pytest_cache - - -# Mac -.DS_Store - -# JetBrains -.idea - -# VS Code -.vscode - -# emacs -*~ - -# Built documentation -docs/_build -bigquery/docs/generated -docs.metadata - -# Virtual environment -env/ - -# Test logs -coverage.xml -*sponge_log.xml - -# System test environment variables. -system_tests/local_test_setup - -# Make sure a generated file isn't accidentally committed. -pylintrc -pylintrc.test diff --git a/.kokoro/build.sh b/.kokoro/build.sh deleted file mode 100755 index 67faddb..0000000 --- a/.kokoro/build.sh +++ /dev/null @@ -1,59 +0,0 @@ -#!/bin/bash -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -if [[ -z "${PROJECT_ROOT:-}" ]]; then - PROJECT_ROOT="github/python-memcache" -fi - -cd "${PROJECT_ROOT}" - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Debug: show build environment -env | grep KOKORO - -# Setup service account credentials. -export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json - -# Setup project id. -export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") - -# Remove old nox -python3 -m pip uninstall --yes --quiet nox-automation - -# Install nox -python3 -m pip install --upgrade --quiet nox -python3 -m nox --version - -# If this is a continuous build, send the test log to the FlakyBot. -# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. -if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then - cleanup() { - chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot - $KOKORO_GFILE_DIR/linux_amd64/flakybot - } - trap cleanup EXIT HUP -fi - -# If NOX_SESSION is set, it only runs the specified session, -# otherwise run all the sessions. -if [[ -n "${NOX_SESSION:-}" ]]; then - python3 -m nox -s ${NOX_SESSION:-} -else - python3 -m nox -fi diff --git a/.kokoro/continuous/common.cfg b/.kokoro/continuous/common.cfg deleted file mode 100644 index aef2f4a..0000000 --- a/.kokoro/continuous/common.cfg +++ /dev/null @@ -1,27 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Download resources for system tests (service account key, etc.) -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" - -# Use the trampoline script to run in docker. 
-build_file: "python-memcache/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-memcache/.kokoro/build.sh" -} diff --git a/.kokoro/continuous/continuous.cfg b/.kokoro/continuous/continuous.cfg deleted file mode 100644 index 8f43917..0000000 --- a/.kokoro/continuous/continuous.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/.kokoro/continuous/prerelease-deps.cfg b/.kokoro/continuous/prerelease-deps.cfg deleted file mode 100644 index 3595fb4..0000000 --- a/.kokoro/continuous/prerelease-deps.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Only run this nox session. -env_vars: { - key: "NOX_SESSION" - value: "prerelease_deps" -} diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile deleted file mode 100644 index f8137d0..0000000 --- a/.kokoro/docker/docs/Dockerfile +++ /dev/null @@ -1,83 +0,0 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from ubuntu:22.04 - -ENV DEBIAN_FRONTEND noninteractive - -# Ensure local Python is preferred over distribution Python. -ENV PATH /usr/local/bin:$PATH - -# Install dependencies. 
-RUN apt-get update \ - && apt-get install -y --no-install-recommends \ - apt-transport-https \ - build-essential \ - ca-certificates \ - curl \ - dirmngr \ - git \ - gpg-agent \ - graphviz \ - libbz2-dev \ - libdb5.3-dev \ - libexpat1-dev \ - libffi-dev \ - liblzma-dev \ - libreadline-dev \ - libsnappy-dev \ - libssl-dev \ - libsqlite3-dev \ - portaudio19-dev \ - python3-distutils \ - redis-server \ - software-properties-common \ - ssh \ - sudo \ - tcl \ - tcl-dev \ - tk \ - tk-dev \ - uuid-dev \ - wget \ - zlib1g-dev \ - && add-apt-repository universe \ - && apt-get update \ - && apt-get -y install jq \ - && apt-get clean autoclean \ - && apt-get autoremove -y \ - && rm -rf /var/lib/apt/lists/* \ - && rm -f /var/cache/apt/archives/*.deb - -###################### Install python 3.9.13 - -# Download python 3.9.13 -RUN wget https://www.python.org/ftp/python/3.9.13/Python-3.9.13.tgz - -# Extract files -RUN tar -xvf Python-3.9.13.tgz - -# Install python 3.9.13 -RUN ./Python-3.9.13/configure --enable-optimizations -RUN make altinstall - -###################### Install pip -RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3 /tmp/get-pip.py \ - && rm /tmp/get-pip.py - -# Test pip -RUN python3 -m pip - -CMD ["python3.8"] diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg deleted file mode 100644 index 7cebbb8..0000000 --- a/.kokoro/docs/common.cfg +++ /dev/null @@ -1,66 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-memcache/.kokoro/trampoline_v2.sh" - -# Configure the docker image for kokoro-trampoline. 
-env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-memcache/.kokoro/publish-docs.sh" -} - -env_vars: { - key: "STAGING_BUCKET" - value: "docs-staging" -} - -env_vars: { - key: "V2_STAGING_BUCKET" - # Push google cloud library docs to the Cloud RAD bucket `docs-staging-v2` - value: "docs-staging-v2" -} - -# It will upload the docker image after successful builds. -env_vars: { - key: "TRAMPOLINE_IMAGE_UPLOAD" - value: "true" -} - -# It will always build the docker image. -env_vars: { - key: "TRAMPOLINE_DOCKERFILE" - value: ".kokoro/docker/docs/Dockerfile" -} - -# Fetch the token needed for reporting release status to GitHub -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "yoshi-automation-github-key" - } - } -} - -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "docuploader_service_account" - } - } -} \ No newline at end of file diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg deleted file mode 100644 index b15caf9..0000000 --- a/.kokoro/docs/docs-presubmit.cfg +++ /dev/null @@ -1,28 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "STAGING_BUCKET" - value: "gcloud-python-test" -} - -env_vars: { - key: "V2_STAGING_BUCKET" - value: "gcloud-python-test" -} - -# We only upload the image in the main `docs` build. -env_vars: { - key: "TRAMPOLINE_IMAGE_UPLOAD" - value: "false" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-memcache/.kokoro/build.sh" -} - -# Only run this nox session. 
-env_vars: { - key: "NOX_SESSION" - value: "docs docfx" -} diff --git a/.kokoro/docs/docs.cfg b/.kokoro/docs/docs.cfg deleted file mode 100644 index 8f43917..0000000 --- a/.kokoro/docs/docs.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh deleted file mode 100755 index f525142..0000000 --- a/.kokoro/populate-secrets.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/bin/bash -# Copyright 2020 Google LLC. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;} -function msg { println "$*" >&2 ;} -function println { printf '%s\n' "$(now) $*" ;} - - -# Populates requested secrets set in SECRET_MANAGER_KEYS from service account: -# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com -SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" -msg "Creating folder on disk for secrets: ${SECRET_LOCATION}" -mkdir -p ${SECRET_LOCATION} -for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g") -do - msg "Retrieving secret ${key}" - docker run --entrypoint=gcloud \ - --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \ - gcr.io/google.com/cloudsdktool/cloud-sdk \ - secrets versions access latest \ - --project cloud-devrel-kokoro-resources \ - --secret ${key} > \ - "${SECRET_LOCATION}/${key}" - if [[ $? 
== 0 ]]; then - msg "Secret written to ${SECRET_LOCATION}/${key}" - else - msg "Error retrieving secret ${key}" - fi -done diff --git a/.kokoro/presubmit/common.cfg b/.kokoro/presubmit/common.cfg deleted file mode 100644 index aef2f4a..0000000 --- a/.kokoro/presubmit/common.cfg +++ /dev/null @@ -1,27 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Download resources for system tests (service account key, etc.) -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" - -# Use the trampoline script to run in docker. -build_file: "python-memcache/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-memcache/.kokoro/build.sh" -} diff --git a/.kokoro/presubmit/prerelease-deps.cfg b/.kokoro/presubmit/prerelease-deps.cfg deleted file mode 100644 index 3595fb4..0000000 --- a/.kokoro/presubmit/prerelease-deps.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Only run this nox session. 
-env_vars: { - key: "NOX_SESSION" - value: "prerelease_deps" -} diff --git a/.kokoro/presubmit/presubmit.cfg b/.kokoro/presubmit/presubmit.cfg deleted file mode 100644 index 8f43917..0000000 --- a/.kokoro/presubmit/presubmit.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh deleted file mode 100755 index 1c4d623..0000000 --- a/.kokoro/publish-docs.sh +++ /dev/null @@ -1,62 +0,0 @@ -#!/bin/bash -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# Disable buffering, so that the logs stream through. 
-export PYTHONUNBUFFERED=1 - -export PATH="${HOME}/.local/bin:${PATH}" - -# Install nox -python3 -m pip install --require-hashes -r .kokoro/requirements.txt -python3 -m nox --version - -# build docs -nox -s docs - -# create metadata -python3 -m docuploader create-metadata \ - --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3 setup.py --version) \ - --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3 setup.py --name) \ - --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ - --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ - --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) - -cat docs.metadata - -# upload docs -python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" - - -# docfx yaml files -nox -s docfx - -# create metadata. -python3 -m docuploader create-metadata \ - --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3 setup.py --version) \ - --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3 setup.py --name) \ - --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ - --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ - --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) - -cat docs.metadata - -# upload docs -python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" diff --git a/.kokoro/release.sh b/.kokoro/release.sh deleted file mode 100755 index 73c1295..0000000 --- a/.kokoro/release.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/bash -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use 
this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# Start the releasetool reporter -python3 -m pip install --require-hashes -r github/python-memcache/.kokoro/requirements.txt -python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Move into the package, build the distribution and upload. -TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1") -cd github/python-memcache -python3 setup.py sdist bdist_wheel -twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg deleted file mode 100644 index aea3116..0000000 --- a/.kokoro/release/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-memcache/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. 
-env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-memcache/.kokoro/release.sh" -} - -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google-cloud-pypi-token-keystore-1" - } - } -} - -# Tokens needed to report release status back to GitHub -env_vars: { - key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" -} diff --git a/.kokoro/release/release.cfg b/.kokoro/release/release.cfg deleted file mode 100644 index 8f43917..0000000 --- a/.kokoro/release/release.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in deleted file mode 100644 index ec867d9..0000000 --- a/.kokoro/requirements.in +++ /dev/null @@ -1,10 +0,0 @@ -gcp-docuploader -gcp-releasetool>=1.10.5 # required for compatibility with cryptography>=39.x -importlib-metadata -typing-extensions -twine -wheel -setuptools -nox>=2022.11.21 # required to remove dependency on py -charset-normalizer<3 -click<8.1.0 diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt deleted file mode 100644 index c7929db..0000000 --- a/.kokoro/requirements.txt +++ /dev/null @@ -1,492 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following command: -# -# pip-compile --allow-unsafe --generate-hashes requirements.in -# -argcomplete==2.0.0 \ - --hash=sha256:6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20 \ - --hash=sha256:cffa11ea77999bb0dd27bb25ff6dc142a6796142f68d45b1a26b11f58724561e - # via nox -attrs==22.1.0 \ - --hash=sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6 \ - 
--hash=sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c - # via gcp-releasetool -bleach==5.0.1 \ - --hash=sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a \ - --hash=sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c - # via readme-renderer -cachetools==5.2.0 \ - --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ - --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db - # via google-auth -certifi==2022.12.7 \ - --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ - --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 - # via requests -cffi==1.15.1 \ - --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ - --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ - --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ - --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ - --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ - --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ - --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ - --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ - --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ - --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ - --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ - --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ - --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ - --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ - --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ - 
--hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ - --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ - --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ - --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ - --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ - --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ - --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ - --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ - --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ - --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ - --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ - --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ - --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ - --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ - --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ - --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ - --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ - --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ - --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ - --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ - --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ - --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ - --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ - --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ - 
--hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ - --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ - --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ - --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ - --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ - --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ - --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ - --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ - --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ - --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ - --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ - --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ - --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ - --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ - --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ - --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ - --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ - --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ - --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ - --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ - --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ - --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ - --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ - --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ - 
--hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 - # via cryptography -charset-normalizer==2.1.1 \ - --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ - --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f - # via - # -r requirements.in - # requests -click==8.0.4 \ - --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ - --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb - # via - # -r requirements.in - # gcp-docuploader - # gcp-releasetool -colorlog==6.7.0 \ - --hash=sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662 \ - --hash=sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5 - # via - # gcp-docuploader - # nox -commonmark==0.9.1 \ - --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ - --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 - # via rich -cryptography==41.0.0 \ - --hash=sha256:0ddaee209d1cf1f180f1efa338a68c4621154de0afaef92b89486f5f96047c55 \ - --hash=sha256:14754bcdae909d66ff24b7b5f166d69340ccc6cb15731670435efd5719294895 \ - --hash=sha256:344c6de9f8bda3c425b3a41b319522ba3208551b70c2ae00099c205f0d9fd3be \ - --hash=sha256:34d405ea69a8b34566ba3dfb0521379b210ea5d560fafedf9f800a9a94a41928 \ - --hash=sha256:3680248309d340fda9611498a5319b0193a8dbdb73586a1acf8109d06f25b92d \ - --hash=sha256:3c5ef25d060c80d6d9f7f9892e1d41bb1c79b78ce74805b8cb4aa373cb7d5ec8 \ - --hash=sha256:4ab14d567f7bbe7f1cdff1c53d5324ed4d3fc8bd17c481b395db224fb405c237 \ - --hash=sha256:5c1f7293c31ebc72163a9a0df246f890d65f66b4a40d9ec80081969ba8c78cc9 \ - --hash=sha256:6b71f64beeea341c9b4f963b48ee3b62d62d57ba93eb120e1196b31dc1025e78 \ - --hash=sha256:7d92f0248d38faa411d17f4107fc0bce0c42cae0b0ba5415505df72d751bf62d \ - --hash=sha256:8362565b3835ceacf4dc8f3b56471a2289cf51ac80946f9087e66dc283a810e0 \ - 
--hash=sha256:84a165379cb9d411d58ed739e4af3396e544eac190805a54ba2e0322feb55c46 \ - --hash=sha256:88ff107f211ea696455ea8d911389f6d2b276aabf3231bf72c8853d22db755c5 \ - --hash=sha256:9f65e842cb02550fac96536edb1d17f24c0a338fd84eaf582be25926e993dde4 \ - --hash=sha256:a4fc68d1c5b951cfb72dfd54702afdbbf0fb7acdc9b7dc4301bbf2225a27714d \ - --hash=sha256:b7f2f5c525a642cecad24ee8670443ba27ac1fab81bba4cc24c7b6b41f2d0c75 \ - --hash=sha256:b846d59a8d5a9ba87e2c3d757ca019fa576793e8758174d3868aecb88d6fc8eb \ - --hash=sha256:bf8fc66012ca857d62f6a347007e166ed59c0bc150cefa49f28376ebe7d992a2 \ - --hash=sha256:f5d0bf9b252f30a31664b6f64432b4730bb7038339bd18b1fafe129cfc2be9be - # via - # gcp-releasetool - # secretstorage -distlib==0.3.6 \ - --hash=sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46 \ - --hash=sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e - # via virtualenv -docutils==0.19 \ - --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \ - --hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc - # via readme-renderer -filelock==3.8.0 \ - --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \ - --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4 - # via virtualenv -gcp-docuploader==0.6.4 \ - --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ - --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf - # via -r requirements.in -gcp-releasetool==1.10.5 \ - --hash=sha256:174b7b102d704b254f2a26a3eda2c684fd3543320ec239baf771542a2e58e109 \ - --hash=sha256:e29d29927fe2ca493105a82958c6873bb2b90d503acac56be2c229e74de0eec9 - # via -r requirements.in -google-api-core==2.10.2 \ - --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ - --hash=sha256:34f24bd1d5f72a8c4519773d99ca6bf080a6c4e041b4e9f024fe230191dda62e - # via - # google-cloud-core - # google-cloud-storage 
-google-auth==2.14.1 \ - --hash=sha256:ccaa901f31ad5cbb562615eb8b664b3dd0bf5404a67618e642307f00613eda4d \ - --hash=sha256:f5d8701633bebc12e0deea4df8abd8aff31c28b355360597f7f2ee60f2e4d016 - # via - # gcp-releasetool - # google-api-core - # google-cloud-core - # google-cloud-storage -google-cloud-core==2.3.2 \ - --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \ - --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a - # via google-cloud-storage -google-cloud-storage==2.6.0 \ - --hash=sha256:104ca28ae61243b637f2f01455cc8a05e8f15a2a18ced96cb587241cdd3820f5 \ - --hash=sha256:4ad0415ff61abdd8bb2ae81c1f8f7ec7d91a1011613f2db87c614c550f97bfe9 - # via gcp-docuploader -google-crc32c==1.5.0 \ - --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ - --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \ - --hash=sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c \ - --hash=sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289 \ - --hash=sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298 \ - --hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 \ - --hash=sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f \ - --hash=sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2 \ - --hash=sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a \ - --hash=sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb \ - --hash=sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210 \ - --hash=sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5 \ - --hash=sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee \ - --hash=sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c \ - --hash=sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a \ - 
--hash=sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314 \ - --hash=sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd \ - --hash=sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65 \ - --hash=sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37 \ - --hash=sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4 \ - --hash=sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13 \ - --hash=sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894 \ - --hash=sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31 \ - --hash=sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e \ - --hash=sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709 \ - --hash=sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740 \ - --hash=sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc \ - --hash=sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d \ - --hash=sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c \ - --hash=sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c \ - --hash=sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d \ - --hash=sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906 \ - --hash=sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61 \ - --hash=sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57 \ - --hash=sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c \ - --hash=sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a \ - --hash=sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438 \ - --hash=sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946 \ - --hash=sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7 \ - 
--hash=sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96 \ - --hash=sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091 \ - --hash=sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae \ - --hash=sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d \ - --hash=sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88 \ - --hash=sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2 \ - --hash=sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd \ - --hash=sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541 \ - --hash=sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728 \ - --hash=sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178 \ - --hash=sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968 \ - --hash=sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346 \ - --hash=sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8 \ - --hash=sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93 \ - --hash=sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7 \ - --hash=sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273 \ - --hash=sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462 \ - --hash=sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94 \ - --hash=sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd \ - --hash=sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e \ - --hash=sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57 \ - --hash=sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b \ - --hash=sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9 \ - --hash=sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a \ - 
--hash=sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100 \ - --hash=sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325 \ - --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ - --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ - --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 - # via google-resumable-media -google-resumable-media==2.4.0 \ - --hash=sha256:2aa004c16d295c8f6c33b2b4788ba59d366677c0a25ae7382436cb30f776deaa \ - --hash=sha256:8d5518502f92b9ecc84ac46779bd4f09694ecb3ba38a3e7ca737a86d15cbca1f - # via google-cloud-storage -googleapis-common-protos==1.57.0 \ - --hash=sha256:27a849d6205838fb6cc3c1c21cb9800707a661bb21c6ce7fb13e99eb1f8a0c46 \ - --hash=sha256:a9f4a1d7f6d9809657b7f1316a1aa527f6664891531bcfcc13b6696e685f443c - # via google-api-core -idna==3.4 \ - --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ - --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 - # via requests -importlib-metadata==5.0.0 \ - --hash=sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab \ - --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43 - # via - # -r requirements.in - # keyring - # twine -jaraco-classes==3.2.3 \ - --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ - --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a - # via keyring -jeepney==0.8.0 \ - --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ - --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 - # via - # keyring - # secretstorage -jinja2==3.1.2 \ - --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ - --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 - # via gcp-releasetool -keyring==23.11.0 \ - 
--hash=sha256:3dd30011d555f1345dec2c262f0153f2f0ca6bca041fb1dc4588349bb4c0ac1e \ - --hash=sha256:ad192263e2cdd5f12875dedc2da13534359a7e760e77f8d04b50968a821c2361 - # via - # gcp-releasetool - # twine -markupsafe==2.1.1 \ - --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \ - --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \ - --hash=sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5 \ - --hash=sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7 \ - --hash=sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a \ - --hash=sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603 \ - --hash=sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1 \ - --hash=sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135 \ - --hash=sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247 \ - --hash=sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6 \ - --hash=sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601 \ - --hash=sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77 \ - --hash=sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02 \ - --hash=sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e \ - --hash=sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63 \ - --hash=sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f \ - --hash=sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980 \ - --hash=sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b \ - --hash=sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812 \ - --hash=sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff \ - --hash=sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96 \ - 
--hash=sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1 \ - --hash=sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925 \ - --hash=sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a \ - --hash=sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6 \ - --hash=sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e \ - --hash=sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f \ - --hash=sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4 \ - --hash=sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f \ - --hash=sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3 \ - --hash=sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c \ - --hash=sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a \ - --hash=sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417 \ - --hash=sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a \ - --hash=sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a \ - --hash=sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37 \ - --hash=sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452 \ - --hash=sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933 \ - --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ - --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 - # via jinja2 -more-itertools==9.0.0 \ - --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ - --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab - # via jaraco-classes -nox==2022.11.21 \ - --hash=sha256:0e41a990e290e274cb205a976c4c97ee3c5234441a8132c8c3fd9ea3c22149eb \ - --hash=sha256:e21c31de0711d1274ca585a2c5fde36b1aa962005ba8e9322bf5eeed16dcd684 - # via -r 
requirements.in -packaging==21.3 \ - --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ - --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 - # via - # gcp-releasetool - # nox -pkginfo==1.8.3 \ - --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \ - --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c - # via twine -platformdirs==2.5.4 \ - --hash=sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7 \ - --hash=sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10 - # via virtualenv -protobuf==3.20.3 \ - --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ - --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ - --hash=sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2 \ - --hash=sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b \ - --hash=sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050 \ - --hash=sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9 \ - --hash=sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7 \ - --hash=sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454 \ - --hash=sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480 \ - --hash=sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469 \ - --hash=sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c \ - --hash=sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e \ - --hash=sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db \ - --hash=sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905 \ - --hash=sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b \ - --hash=sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86 \ - 
--hash=sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4 \ - --hash=sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402 \ - --hash=sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7 \ - --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ - --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ - --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee - # via - # gcp-docuploader - # gcp-releasetool - # google-api-core -pyasn1==0.4.8 \ - --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ - --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba - # via - # pyasn1-modules - # rsa -pyasn1-modules==0.2.8 \ - --hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \ - --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74 - # via google-auth -pycparser==2.21 \ - --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ - --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 - # via cffi -pygments==2.13.0 \ - --hash=sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1 \ - --hash=sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42 - # via - # readme-renderer - # rich -pyjwt==2.6.0 \ - --hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \ - --hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14 - # via gcp-releasetool -pyparsing==3.0.9 \ - --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ - --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc - # via packaging -pyperclip==1.8.2 \ - --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 - # via gcp-releasetool -python-dateutil==2.8.2 \ - 
--hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ - --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 - # via gcp-releasetool -readme-renderer==37.3 \ - --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ - --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 - # via twine -requests==2.31.0 \ - --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ - --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 - # via - # gcp-releasetool - # google-api-core - # google-cloud-storage - # requests-toolbelt - # twine -requests-toolbelt==0.10.1 \ - --hash=sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7 \ - --hash=sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d - # via twine -rfc3986==2.0.0 \ - --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ - --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c - # via twine -rich==12.6.0 \ - --hash=sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e \ - --hash=sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0 - # via twine -rsa==4.9 \ - --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ - --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 - # via google-auth -secretstorage==3.3.3 \ - --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ - --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 - # via keyring -six==1.16.0 \ - --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ - --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 - # via - # bleach - # gcp-docuploader - # google-auth - # python-dateutil -twine==4.0.1 \ - 
--hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \ - --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0 - # via -r requirements.in -typing-extensions==4.4.0 \ - --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ - --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e - # via -r requirements.in -urllib3==1.26.12 \ - --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ - --hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997 - # via - # requests - # twine -virtualenv==20.16.7 \ - --hash=sha256:8691e3ff9387f743e00f6bb20f70121f5e4f596cae754531f2b3b3a1b1ac696e \ - --hash=sha256:efd66b00386fdb7dbe4822d172303f40cd05e50e01740b19ea42425cbe653e29 - # via nox -webencodings==0.5.1 \ - --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ - --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 - # via bleach -wheel==0.38.4 \ - --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ - --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 - # via -r requirements.in -zipp==3.10.0 \ - --hash=sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1 \ - --hash=sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8 - # via importlib-metadata - -# The following packages are considered to be unsafe in a requirements file: -setuptools==65.5.1 \ - --hash=sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31 \ - --hash=sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f - # via -r requirements.in diff --git a/.kokoro/samples/lint/common.cfg b/.kokoro/samples/lint/common.cfg deleted file mode 100644 index 2672aa9..0000000 --- a/.kokoro/samples/lint/common.cfg +++ /dev/null @@ -1,34 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs 
will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "lint" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-memcache/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-memcache/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/lint/continuous.cfg b/.kokoro/samples/lint/continuous.cfg deleted file mode 100644 index a1c8d97..0000000 --- a/.kokoro/samples/lint/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/.kokoro/samples/lint/periodic.cfg b/.kokoro/samples/lint/periodic.cfg deleted file mode 100644 index 50fec96..0000000 --- a/.kokoro/samples/lint/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} \ No newline at end of file diff --git a/.kokoro/samples/lint/presubmit.cfg b/.kokoro/samples/lint/presubmit.cfg deleted file mode 100644 index a1c8d97..0000000 --- a/.kokoro/samples/lint/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/.kokoro/samples/python3.10/common.cfg b/.kokoro/samples/python3.10/common.cfg deleted file mode 100644 
index e6768d6..0000000 --- a/.kokoro/samples/python3.10/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.10" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-310" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-memcache/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-memcache/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.10/continuous.cfg b/.kokoro/samples/python3.10/continuous.cfg deleted file mode 100644 index a1c8d97..0000000 --- a/.kokoro/samples/python3.10/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/.kokoro/samples/python3.10/periodic-head.cfg b/.kokoro/samples/python3.10/periodic-head.cfg deleted file mode 100644 index aa527a5..0000000 --- a/.kokoro/samples/python3.10/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-memcache/.kokoro/test-samples-against-head.sh" -} diff --git a/.kokoro/samples/python3.10/periodic.cfg b/.kokoro/samples/python3.10/periodic.cfg deleted file mode 100644 index 71cd1e5..0000000 --- a/.kokoro/samples/python3.10/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/.kokoro/samples/python3.10/presubmit.cfg b/.kokoro/samples/python3.10/presubmit.cfg deleted file mode 100644 index a1c8d97..0000000 --- a/.kokoro/samples/python3.10/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/.kokoro/samples/python3.11/common.cfg b/.kokoro/samples/python3.11/common.cfg deleted file mode 100644 index b7f70aa..0000000 --- a/.kokoro/samples/python3.11/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: 
"**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.11" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-311" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-memcache/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-memcache/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.11/continuous.cfg b/.kokoro/samples/python3.11/continuous.cfg deleted file mode 100644 index a1c8d97..0000000 --- a/.kokoro/samples/python3.11/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/.kokoro/samples/python3.11/periodic-head.cfg b/.kokoro/samples/python3.11/periodic-head.cfg deleted file mode 100644 index aa527a5..0000000 --- a/.kokoro/samples/python3.11/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-memcache/.kokoro/test-samples-against-head.sh" -} diff --git a/.kokoro/samples/python3.11/periodic.cfg b/.kokoro/samples/python3.11/periodic.cfg deleted file mode 100644 index 71cd1e5..0000000 --- a/.kokoro/samples/python3.11/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: 
//devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/.kokoro/samples/python3.11/presubmit.cfg b/.kokoro/samples/python3.11/presubmit.cfg deleted file mode 100644 index a1c8d97..0000000 --- a/.kokoro/samples/python3.11/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg deleted file mode 100644 index f13d51d..0000000 --- a/.kokoro/samples/python3.7/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.7" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-py37" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-memcache/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-memcache/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.7/continuous.cfg b/.kokoro/samples/python3.7/continuous.cfg deleted file mode 100644 index a1c8d97..0000000 --- a/.kokoro/samples/python3.7/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/.kokoro/samples/python3.7/periodic-head.cfg b/.kokoro/samples/python3.7/periodic-head.cfg deleted file mode 100644 index aa527a5..0000000 --- a/.kokoro/samples/python3.7/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-memcache/.kokoro/test-samples-against-head.sh" -} diff --git a/.kokoro/samples/python3.7/periodic.cfg b/.kokoro/samples/python3.7/periodic.cfg deleted file mode 100644 index 71cd1e5..0000000 --- a/.kokoro/samples/python3.7/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/.kokoro/samples/python3.7/presubmit.cfg b/.kokoro/samples/python3.7/presubmit.cfg deleted file mode 100644 index a1c8d97..0000000 --- a/.kokoro/samples/python3.7/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg deleted file mode 100644 index a89f1b2..0000000 --- a/.kokoro/samples/python3.8/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: 
"**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.8" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-py38" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-memcache/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-memcache/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.8/continuous.cfg b/.kokoro/samples/python3.8/continuous.cfg deleted file mode 100644 index a1c8d97..0000000 --- a/.kokoro/samples/python3.8/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/.kokoro/samples/python3.8/periodic-head.cfg b/.kokoro/samples/python3.8/periodic-head.cfg deleted file mode 100644 index aa527a5..0000000 --- a/.kokoro/samples/python3.8/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-memcache/.kokoro/test-samples-against-head.sh" -} diff --git a/.kokoro/samples/python3.8/periodic.cfg b/.kokoro/samples/python3.8/periodic.cfg deleted file mode 100644 index 71cd1e5..0000000 --- a/.kokoro/samples/python3.8/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: 
//devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/.kokoro/samples/python3.8/presubmit.cfg b/.kokoro/samples/python3.8/presubmit.cfg deleted file mode 100644 index a1c8d97..0000000 --- a/.kokoro/samples/python3.8/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/.kokoro/samples/python3.9/common.cfg b/.kokoro/samples/python3.9/common.cfg deleted file mode 100644 index 11f9533..0000000 --- a/.kokoro/samples/python3.9/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.9" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-py39" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-memcache/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-memcache/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.9/continuous.cfg b/.kokoro/samples/python3.9/continuous.cfg deleted file mode 100644 index a1c8d97..0000000 --- a/.kokoro/samples/python3.9/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/.kokoro/samples/python3.9/periodic-head.cfg b/.kokoro/samples/python3.9/periodic-head.cfg deleted file mode 100644 index aa527a5..0000000 --- a/.kokoro/samples/python3.9/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-memcache/.kokoro/test-samples-against-head.sh" -} diff --git a/.kokoro/samples/python3.9/periodic.cfg b/.kokoro/samples/python3.9/periodic.cfg deleted file mode 100644 index 71cd1e5..0000000 --- a/.kokoro/samples/python3.9/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/.kokoro/samples/python3.9/presubmit.cfg b/.kokoro/samples/python3.9/presubmit.cfg deleted file mode 100644 index a1c8d97..0000000 --- a/.kokoro/samples/python3.9/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh deleted file mode 100755 index ba3a707..0000000 --- a/.kokoro/test-samples-against-head.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/bash -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this 
file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# A customized test runner for samples. -# -# For periodic builds, you can specify this file for testing against head. - -# `-e` enables the script to automatically fail when a command fails -# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero -set -eo pipefail -# Enables `**` to include files nested inside sub-folders -shopt -s globstar - -exec .kokoro/test-samples-impl.sh diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh deleted file mode 100755 index 2c6500c..0000000 --- a/.kokoro/test-samples-impl.sh +++ /dev/null @@ -1,102 +0,0 @@ -#!/bin/bash -# Copyright 2021 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -# `-e` enables the script to automatically fail when a command fails -# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero -set -eo pipefail -# Enables `**` to include files nested inside sub-folders -shopt -s globstar - -# Exit early if samples don't exist -if ! 
find samples -name 'requirements.txt' | grep -q .; then - echo "No tests run. './samples/**/requirements.txt' not found" - exit 0 -fi - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Debug: show build environment -env | grep KOKORO - -# Install nox -python3.9 -m pip install --upgrade --quiet nox - -# Use secrets acessor service account to get secrets -if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then - gcloud auth activate-service-account \ - --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ - --project="cloud-devrel-kokoro-resources" -fi - -# This script will create 3 files: -# - testing/test-env.sh -# - testing/service-account.json -# - testing/client-secrets.json -./scripts/decrypt-secrets.sh - -source ./testing/test-env.sh -export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json - -# For cloud-run session, we activate the service account for gcloud sdk. -gcloud auth activate-service-account \ - --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" - -export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json - -echo -e "\n******************** TESTING PROJECTS ********************" - -# Switch to 'fail at end' to allow all tests to complete before exiting. -set +e -# Use RTN to return a non-zero value if the test fails. -RTN=0 -ROOT=$(pwd) -# Find all requirements.txt in the samples directory (may break on whitespace). -for file in samples/**/requirements.txt; do - cd "$ROOT" - # Navigate to the project folder. - file=$(dirname "$file") - cd "$file" - - echo "------------------------------------------------------------" - echo "- testing $file" - echo "------------------------------------------------------------" - - # Use nox to execute the tests for the project. - python3.9 -m nox -s "$RUN_TESTS_SESSION" - EXIT=$? - - # If this is a periodic build, send the test log to the FlakyBot. 
- # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. - if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot - $KOKORO_GFILE_DIR/linux_amd64/flakybot - fi - - if [[ $EXIT -ne 0 ]]; then - RTN=1 - echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" - else - echo -e "\n Testing completed.\n" - fi - -done -cd "$ROOT" - -# Workaround for Kokoro permissions issue: delete secrets -rm testing/{test-env.sh,client-secrets.json,service-account.json} - -exit "$RTN" diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh deleted file mode 100755 index 11c042d..0000000 --- a/.kokoro/test-samples.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/bin/bash -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# The default test runner for samples. -# -# For periodic builds, we rewinds the repo to the latest release, and -# run test-samples-impl.sh. - -# `-e` enables the script to automatically fail when a command fails -# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero -set -eo pipefail -# Enables `**` to include files nested inside sub-folders -shopt -s globstar - -# Run periodic samples tests at latest release -if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - # preserving the test runner implementation. 
- cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh" - echo "--- IMPORTANT IMPORTANT IMPORTANT ---" - echo "Now we rewind the repo back to the latest release..." - LATEST_RELEASE=$(git describe --abbrev=0 --tags) - git checkout $LATEST_RELEASE - echo "The current head is: " - echo $(git rev-parse --verify HEAD) - echo "--- IMPORTANT IMPORTANT IMPORTANT ---" - # move back the test runner implementation if there's no file. - if [ ! -f .kokoro/test-samples-impl.sh ]; then - cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh - fi -fi - -exec .kokoro/test-samples-impl.sh diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh deleted file mode 100755 index f39236e..0000000 --- a/.kokoro/trampoline.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/bash -# Copyright 2017 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# Always run the cleanup script, regardless of the success of bouncing into -# the container. -function cleanup() { - chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh - ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh - echo "cleanup"; -} -trap cleanup EXIT - -$(dirname $0)/populate-secrets.sh # Secret Manager secrets. 
-python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" \ No newline at end of file diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh deleted file mode 100755 index 4af6cdc..0000000 --- a/.kokoro/trampoline_v2.sh +++ /dev/null @@ -1,487 +0,0 @@ -#!/usr/bin/env bash -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# trampoline_v2.sh -# -# This script does 3 things. -# -# 1. Prepare the Docker image for the test -# 2. Run the Docker with appropriate flags to run the test -# 3. Upload the newly built Docker image -# -# in a way that is somewhat compatible with trampoline_v1. -# -# To run this script, first download few files from gcs to /dev/shm. -# (/dev/shm is passed into the container as KOKORO_GFILE_DIR). -# -# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm -# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm -# -# Then run the script. -# .kokoro/trampoline_v2.sh -# -# These environment variables are required: -# TRAMPOLINE_IMAGE: The docker image to use. -# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile. -# -# You can optionally change these environment variables: -# TRAMPOLINE_IMAGE_UPLOAD: -# (true|false): Whether to upload the Docker image after the -# successful builds. -# TRAMPOLINE_BUILD_FILE: The script to run in the docker container. -# TRAMPOLINE_WORKSPACE: The workspace path in the docker container. 
-# Defaults to /workspace. -# Potentially there are some repo specific envvars in .trampolinerc in -# the project root. - - -set -euo pipefail - -TRAMPOLINE_VERSION="2.0.5" - -if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then - readonly IO_COLOR_RED="$(tput setaf 1)" - readonly IO_COLOR_GREEN="$(tput setaf 2)" - readonly IO_COLOR_YELLOW="$(tput setaf 3)" - readonly IO_COLOR_RESET="$(tput sgr0)" -else - readonly IO_COLOR_RED="" - readonly IO_COLOR_GREEN="" - readonly IO_COLOR_YELLOW="" - readonly IO_COLOR_RESET="" -fi - -function function_exists { - [ $(LC_ALL=C type -t $1)"" == "function" ] -} - -# Logs a message using the given color. The first argument must be one -# of the IO_COLOR_* variables defined above, such as -# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the -# given color. The log message will also have an RFC-3339 timestamp -# prepended (in UTC). You can disable the color output by setting -# TERM=vt100. -function log_impl() { - local color="$1" - shift - local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")" - echo "================================================================" - echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}" - echo "================================================================" -} - -# Logs the given message with normal coloring and a timestamp. -function log() { - log_impl "${IO_COLOR_RESET}" "$@" -} - -# Logs the given message in green with a timestamp. -function log_green() { - log_impl "${IO_COLOR_GREEN}" "$@" -} - -# Logs the given message in yellow with a timestamp. -function log_yellow() { - log_impl "${IO_COLOR_YELLOW}" "$@" -} - -# Logs the given message in red with a timestamp. 
-function log_red() { - log_impl "${IO_COLOR_RED}" "$@" -} - -readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX) -readonly tmphome="${tmpdir}/h" -mkdir -p "${tmphome}" - -function cleanup() { - rm -rf "${tmpdir}" -} -trap cleanup EXIT - -RUNNING_IN_CI="${RUNNING_IN_CI:-false}" - -# The workspace in the container, defaults to /workspace. -TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}" - -pass_down_envvars=( - # TRAMPOLINE_V2 variables. - # Tells scripts whether they are running as part of CI or not. - "RUNNING_IN_CI" - # Indicates which CI system we're in. - "TRAMPOLINE_CI" - # Indicates the version of the script. - "TRAMPOLINE_VERSION" -) - -log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}" - -# Detect which CI systems we're in. If we're in any of the CI systems -# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be -# the name of the CI system. Both envvars will be passing down to the -# container for telling which CI system we're in. -if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then - # descriptive env var for indicating it's on CI. - RUNNING_IN_CI="true" - TRAMPOLINE_CI="kokoro" - if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then - if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then - log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting." - exit 1 - fi - # This service account will be activated later. - TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" - else - if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then - gcloud auth list - fi - log_yellow "Configuring Container Registry access" - gcloud auth configure-docker --quiet - fi - pass_down_envvars+=( - # KOKORO dynamic variables. 
- "KOKORO_BUILD_NUMBER" - "KOKORO_BUILD_ID" - "KOKORO_JOB_NAME" - "KOKORO_GIT_COMMIT" - "KOKORO_GITHUB_COMMIT" - "KOKORO_GITHUB_PULL_REQUEST_NUMBER" - "KOKORO_GITHUB_PULL_REQUEST_COMMIT" - # For FlakyBot - "KOKORO_GITHUB_COMMIT_URL" - "KOKORO_GITHUB_PULL_REQUEST_URL" - ) -elif [[ "${TRAVIS:-}" == "true" ]]; then - RUNNING_IN_CI="true" - TRAMPOLINE_CI="travis" - pass_down_envvars+=( - "TRAVIS_BRANCH" - "TRAVIS_BUILD_ID" - "TRAVIS_BUILD_NUMBER" - "TRAVIS_BUILD_WEB_URL" - "TRAVIS_COMMIT" - "TRAVIS_COMMIT_MESSAGE" - "TRAVIS_COMMIT_RANGE" - "TRAVIS_JOB_NAME" - "TRAVIS_JOB_NUMBER" - "TRAVIS_JOB_WEB_URL" - "TRAVIS_PULL_REQUEST" - "TRAVIS_PULL_REQUEST_BRANCH" - "TRAVIS_PULL_REQUEST_SHA" - "TRAVIS_PULL_REQUEST_SLUG" - "TRAVIS_REPO_SLUG" - "TRAVIS_SECURE_ENV_VARS" - "TRAVIS_TAG" - ) -elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then - RUNNING_IN_CI="true" - TRAMPOLINE_CI="github-workflow" - pass_down_envvars+=( - "GITHUB_WORKFLOW" - "GITHUB_RUN_ID" - "GITHUB_RUN_NUMBER" - "GITHUB_ACTION" - "GITHUB_ACTIONS" - "GITHUB_ACTOR" - "GITHUB_REPOSITORY" - "GITHUB_EVENT_NAME" - "GITHUB_EVENT_PATH" - "GITHUB_SHA" - "GITHUB_REF" - "GITHUB_HEAD_REF" - "GITHUB_BASE_REF" - ) -elif [[ "${CIRCLECI:-}" == "true" ]]; then - RUNNING_IN_CI="true" - TRAMPOLINE_CI="circleci" - pass_down_envvars+=( - "CIRCLE_BRANCH" - "CIRCLE_BUILD_NUM" - "CIRCLE_BUILD_URL" - "CIRCLE_COMPARE_URL" - "CIRCLE_JOB" - "CIRCLE_NODE_INDEX" - "CIRCLE_NODE_TOTAL" - "CIRCLE_PREVIOUS_BUILD_NUM" - "CIRCLE_PROJECT_REPONAME" - "CIRCLE_PROJECT_USERNAME" - "CIRCLE_REPOSITORY_URL" - "CIRCLE_SHA1" - "CIRCLE_STAGE" - "CIRCLE_USERNAME" - "CIRCLE_WORKFLOW_ID" - "CIRCLE_WORKFLOW_JOB_ID" - "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS" - "CIRCLE_WORKFLOW_WORKSPACE_ID" - ) -fi - -# Configure the service account for pulling the docker image. -function repo_root() { - local dir="$1" - while [[ ! -d "${dir}/.git" ]]; do - dir="$(dirname "$dir")" - done - echo "${dir}" -} - -# Detect the project root. 
In CI builds, we assume the script is in -# the git tree and traverse from there, otherwise, traverse from `pwd` -# to find `.git` directory. -if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then - PROGRAM_PATH="$(realpath "$0")" - PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")" - PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")" -else - PROJECT_ROOT="$(repo_root $(pwd))" -fi - -log_yellow "Changing to the project root: ${PROJECT_ROOT}." -cd "${PROJECT_ROOT}" - -# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need -# to use this environment variable in `PROJECT_ROOT`. -if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then - - mkdir -p "${tmpdir}/gcloud" - gcloud_config_dir="${tmpdir}/gcloud" - - log_yellow "Using isolated gcloud config: ${gcloud_config_dir}." - export CLOUDSDK_CONFIG="${gcloud_config_dir}" - - log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication." - gcloud auth activate-service-account \ - --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}" - log_yellow "Configuring Container Registry access" - gcloud auth configure-docker --quiet -fi - -required_envvars=( - # The basic trampoline configurations. - "TRAMPOLINE_IMAGE" - "TRAMPOLINE_BUILD_FILE" -) - -if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then - source "${PROJECT_ROOT}/.trampolinerc" -fi - -log_yellow "Checking environment variables." -for e in "${required_envvars[@]}" -do - if [[ -z "${!e:-}" ]]; then - log "Missing ${e} env var. Aborting." - exit 1 - fi -done - -# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1 -# script: e.g. "github/repo-name/.kokoro/run_tests.sh" -TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}" -log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}" - -# ignore error on docker operations and test execution -set +e - -log_yellow "Preparing Docker image." -# We only download the docker image in CI builds. 
-if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then - # Download the docker image specified by `TRAMPOLINE_IMAGE` - - # We may want to add --max-concurrent-downloads flag. - - log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}." - if docker pull "${TRAMPOLINE_IMAGE}"; then - log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}." - has_image="true" - else - log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}." - has_image="false" - fi -else - # For local run, check if we have the image. - if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then - has_image="true" - else - has_image="false" - fi -fi - - -# The default user for a Docker container has uid 0 (root). To avoid -# creating root-owned files in the build directory we tell docker to -# use the current user ID. -user_uid="$(id -u)" -user_gid="$(id -g)" -user_name="$(id -un)" - -# To allow docker in docker, we add the user to the docker group in -# the host os. -docker_gid=$(cut -d: -f3 < <(getent group docker)) - -update_cache="false" -if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then - # Build the Docker image from the source. - context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}") - docker_build_flags=( - "-f" "${TRAMPOLINE_DOCKERFILE}" - "-t" "${TRAMPOLINE_IMAGE}" - "--build-arg" "UID=${user_uid}" - "--build-arg" "USERNAME=${user_name}" - ) - if [[ "${has_image}" == "true" ]]; then - docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}") - fi - - log_yellow "Start building the docker image." - if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then - echo "docker build" "${docker_build_flags[@]}" "${context_dir}" - fi - - # ON CI systems, we want to suppress docker build logs, only - # output the logs when it fails. 
- if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then - if docker build "${docker_build_flags[@]}" "${context_dir}" \ - > "${tmpdir}/docker_build.log" 2>&1; then - if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then - cat "${tmpdir}/docker_build.log" - fi - - log_green "Finished building the docker image." - update_cache="true" - else - log_red "Failed to build the Docker image, aborting." - log_yellow "Dumping the build logs:" - cat "${tmpdir}/docker_build.log" - exit 1 - fi - else - if docker build "${docker_build_flags[@]}" "${context_dir}"; then - log_green "Finished building the docker image." - update_cache="true" - else - log_red "Failed to build the Docker image, aborting." - exit 1 - fi - fi -else - if [[ "${has_image}" != "true" ]]; then - log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting." - exit 1 - fi -fi - -# We use an array for the flags so they are easier to document. -docker_flags=( - # Remove the container after it exists. - "--rm" - - # Use the host network. - "--network=host" - - # Run in priviledged mode. We are not using docker for sandboxing or - # isolation, just for packaging our dev tools. - "--privileged" - - # Run the docker script with the user id. Because the docker image gets to - # write in ${PWD} you typically want this to be your user id. - # To allow docker in docker, we need to use docker gid on the host. - "--user" "${user_uid}:${docker_gid}" - - # Pass down the USER. - "--env" "USER=${user_name}" - - # Mount the project directory inside the Docker container. - "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}" - "--workdir" "${TRAMPOLINE_WORKSPACE}" - "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}" - - # Mount the temporary home directory. - "--volume" "${tmphome}:/h" - "--env" "HOME=/h" - - # Allow docker in docker. - "--volume" "/var/run/docker.sock:/var/run/docker.sock" - - # Mount the /tmp so that docker in docker can mount the files - # there correctly. 
- "--volume" "/tmp:/tmp" - # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR - # TODO(tmatsuo): This part is not portable. - "--env" "TRAMPOLINE_SECRET_DIR=/secrets" - "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile" - "--env" "KOKORO_GFILE_DIR=/secrets/gfile" - "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore" - "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore" -) - -# Add an option for nicer output if the build gets a tty. -if [[ -t 0 ]]; then - docker_flags+=("-it") -fi - -# Passing down env vars -for e in "${pass_down_envvars[@]}" -do - if [[ -n "${!e:-}" ]]; then - docker_flags+=("--env" "${e}=${!e}") - fi -done - -# If arguments are given, all arguments will become the commands run -# in the container, otherwise run TRAMPOLINE_BUILD_FILE. -if [[ $# -ge 1 ]]; then - log_yellow "Running the given commands '" "${@:1}" "' in the container." - readonly commands=("${@:1}") - if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then - echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" - fi - docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" -else - log_yellow "Running the tests in a Docker container." - docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}") - if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then - echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" - fi - docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" -fi - - -test_retval=$? - -if [[ ${test_retval} -eq 0 ]]; then - log_green "Build finished with ${test_retval}" -else - log_red "Build finished with ${test_retval}" -fi - -# Only upload it when the test passes. -if [[ "${update_cache}" == "true" ]] && \ - [[ $test_retval == 0 ]] && \ - [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then - log_yellow "Uploading the Docker image." - if docker push "${TRAMPOLINE_IMAGE}"; then - log_green "Finished uploading the Docker image." - else - log_red "Failed uploading the Docker image." 
- fi - # Call trampoline_after_upload_hook if it's defined. - if function_exists trampoline_after_upload_hook; then - trampoline_after_upload_hook - fi - -fi - -exit "${test_retval}" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml deleted file mode 100644 index 5405cc8..0000000 --- a/.pre-commit-config.yaml +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright 2021 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# See https://pre-commit.com for more information -# See https://pre-commit.com/hooks.html for more hooks -repos: -- repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.0.1 - hooks: - - id: trailing-whitespace - - id: end-of-file-fixer - - id: check-yaml -- repo: https://github.com/psf/black - rev: 22.3.0 - hooks: - - id: black -- repo: https://github.com/pycqa/flake8 - rev: 3.9.2 - hooks: - - id: flake8 diff --git a/.release-please-manifest.json b/.release-please-manifest.json deleted file mode 100644 index d0972da..0000000 --- a/.release-please-manifest.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - ".": "1.7.1" -} diff --git a/.trampolinerc b/.trampolinerc deleted file mode 100644 index 0eee72a..0000000 --- a/.trampolinerc +++ /dev/null @@ -1,63 +0,0 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Template for .trampolinerc - -# Add required env vars here. -required_envvars+=( -) - -# Add env vars which are passed down into the container here. -pass_down_envvars+=( - "NOX_SESSION" - ############### - # Docs builds - ############### - "STAGING_BUCKET" - "V2_STAGING_BUCKET" - ################## - # Samples builds - ################## - "INSTALL_LIBRARY_FROM_SOURCE" - "RUN_TESTS_SESSION" - "BUILD_SPECIFIC_GCLOUD_PROJECT" - # Target directories. - "RUN_TESTS_DIRS" - # The nox session to run. - "RUN_TESTS_SESSION" -) - -# Prevent unintentional override on the default image. -if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \ - [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then - echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image." - exit 1 -fi - -# Define the default value if it makes sense. 
-if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then - TRAMPOLINE_IMAGE_UPLOAD="" -fi - -if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then - TRAMPOLINE_IMAGE="" -fi - -if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then - TRAMPOLINE_DOCKERFILE="" -fi - -if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then - TRAMPOLINE_BUILD_FILE="" -fi diff --git a/CHANGELOG.md b/CHANGELOG.md deleted file mode 100644 index ec00ed3..0000000 --- a/CHANGELOG.md +++ /dev/null @@ -1,242 +0,0 @@ -# Changelog - -## [1.7.1](https://github.com/googleapis/python-memcache/compare/v1.7.0...v1.7.1) (2023-03-23) - - -### Documentation - -* Fix formatting of request arg in docstring ([#251](https://github.com/googleapis/python-memcache/issues/251)) ([dac4ef6](https://github.com/googleapis/python-memcache/commit/dac4ef673c0ff54178ee4e204e64c516cbdf392a)) - -## [1.7.0](https://github.com/googleapis/python-memcache/compare/v1.6.1...v1.7.0) (2023-02-16) - - -### Features - -* Enable "rest" transport in Python for services supporting numeric enums ([#245](https://github.com/googleapis/python-memcache/issues/245)) ([f702f7a](https://github.com/googleapis/python-memcache/commit/f702f7a08d7a31689e400969d23e4a1d8637dd41)) - -## [1.6.1](https://github.com/googleapis/python-memcache/compare/v1.6.0...v1.6.1) (2023-01-20) - - -### Bug Fixes - -* Add context manager return types ([29246d4](https://github.com/googleapis/python-memcache/commit/29246d4f4dc201a3faab34b3cb16f8629289be82)) - - -### Documentation - -* Add documentation for enums ([29246d4](https://github.com/googleapis/python-memcache/commit/29246d4f4dc201a3faab34b3cb16f8629289be82)) - -## [1.6.0](https://github.com/googleapis/python-memcache/compare/v1.5.0...v1.6.0) (2023-01-10) - - -### Features - -* Add support for python 3.11 ([#236](https://github.com/googleapis/python-memcache/issues/236)) ([36b98c5](https://github.com/googleapis/python-memcache/commit/36b98c5e33ea1f707d45d1e0d4cf91032d789a6e)) - -## 
[1.5.0](https://github.com/googleapis/python-memcache/compare/v1.4.4...v1.5.0) (2022-12-14) - - -### Features - -* Add support for `google.cloud.memcache.__version__` ([c9c771a](https://github.com/googleapis/python-memcache/commit/c9c771af7c188c8c3ce66113b41a475d290aa6c2)) -* Add typing to proto.Message based class attributes ([c9c771a](https://github.com/googleapis/python-memcache/commit/c9c771af7c188c8c3ce66113b41a475d290aa6c2)) -* Maintenance schedules ([c9c771a](https://github.com/googleapis/python-memcache/commit/c9c771af7c188c8c3ce66113b41a475d290aa6c2)) - - -### Bug Fixes - -* Add dict typing for client_options ([c9c771a](https://github.com/googleapis/python-memcache/commit/c9c771af7c188c8c3ce66113b41a475d290aa6c2)) -* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([b1f7a36](https://github.com/googleapis/python-memcache/commit/b1f7a36fa9649dcd345220f692c29f676d858cdc)) -* Drop usage of pkg_resources ([b1f7a36](https://github.com/googleapis/python-memcache/commit/b1f7a36fa9649dcd345220f692c29f676d858cdc)) -* Fix timeout default values ([b1f7a36](https://github.com/googleapis/python-memcache/commit/b1f7a36fa9649dcd345220f692c29f676d858cdc)) - - -### Documentation - -* **samples:** Snippetgen handling of repeated enum field ([c9c771a](https://github.com/googleapis/python-memcache/commit/c9c771af7c188c8c3ce66113b41a475d290aa6c2)) -* **samples:** Snippetgen should call await on the operation coroutine before calling result ([b1f7a36](https://github.com/googleapis/python-memcache/commit/b1f7a36fa9649dcd345220f692c29f676d858cdc)) - -## [1.4.4](https://github.com/googleapis/python-memcache/compare/v1.4.3...v1.4.4) (2022-10-07) - - -### Bug Fixes - -* **deps:** Allow protobuf 3.19.5 ([#224](https://github.com/googleapis/python-memcache/issues/224)) ([90a04d3](https://github.com/googleapis/python-memcache/commit/90a04d303717f9a3decf88fc7516e788f57c2a2f)) - -## [1.4.3](https://github.com/googleapis/python-memcache/compare/v1.4.2...v1.4.3) (2022-09-29) - - -### 
Bug Fixes - -* **deps:** Require protobuf >= 3.20.2 ([#222](https://github.com/googleapis/python-memcache/issues/222)) ([2394f74](https://github.com/googleapis/python-memcache/commit/2394f7477a593b9c7271a581f02f8d570160a23d)) - -## [1.4.2](https://github.com/googleapis/python-memcache/compare/v1.4.1...v1.4.2) (2022-08-11) - - -### Bug Fixes - -* **deps:** allow protobuf < 5.0.0 ([#207](https://github.com/googleapis/python-memcache/issues/207)) ([8a21a06](https://github.com/googleapis/python-memcache/commit/8a21a069eae8ad4e3b0f33012d1f50cf547baafd)) -* **deps:** require proto-plus >= 1.22.0 ([8a21a06](https://github.com/googleapis/python-memcache/commit/8a21a069eae8ad4e3b0f33012d1f50cf547baafd)) - -## [1.4.1](https://github.com/googleapis/python-memcache/compare/v1.4.0...v1.4.1) (2022-07-13) - - -### Bug Fixes - -* **deps:** require google-api-core>=1.32.0,>=2.8.0 ([#199](https://github.com/googleapis/python-memcache/issues/199)) ([aa7978e](https://github.com/googleapis/python-memcache/commit/aa7978edd9b6fbe831775622ed3066e39112c2b1)) - -## [1.4.0](https://github.com/googleapis/python-memcache/compare/v1.3.2...v1.4.0) (2022-07-06) - - -### Features - -* add audience parameter ([9ef3f98](https://github.com/googleapis/python-memcache/commit/9ef3f98e1fb6d73ff9a0f3a9dc9fd74c60ba8c78)) - - -### Bug Fixes - -* **deps:** require google-api-core >= 2.8.0 ([#194](https://github.com/googleapis/python-memcache/issues/194)) ([9ef3f98](https://github.com/googleapis/python-memcache/commit/9ef3f98e1fb6d73ff9a0f3a9dc9fd74c60ba8c78)) -* exclude tests directory in packaging ([#195](https://github.com/googleapis/python-memcache/issues/195)) ([bfc330b](https://github.com/googleapis/python-memcache/commit/bfc330ba0db806ae59a1880414fb6404d78c3ea1)) -* require python 3.7+ ([#198](https://github.com/googleapis/python-memcache/issues/198)) ([a792592](https://github.com/googleapis/python-memcache/commit/a792592877e7ff83d5afe631dcf4d2246d33966c)) - -## 
[1.3.2](https://github.com/googleapis/python-memcache/compare/v1.3.1...v1.3.2) (2022-06-06) - - -### Bug Fixes - -* **deps:** require protobuf <4.0.0dev ([#184](https://github.com/googleapis/python-memcache/issues/184)) ([acc06a7](https://github.com/googleapis/python-memcache/commit/acc06a7c8564d272617a66456ac2a002b463443f)) - - -### Documentation - -* fix changelog header to consistent size ([#183](https://github.com/googleapis/python-memcache/issues/183)) ([3647e5f](https://github.com/googleapis/python-memcache/commit/3647e5f70d1d43e388d25f11fee9d730c453732d)) - -## [1.3.1](https://github.com/googleapis/python-memcache/compare/v1.3.0...v1.3.1) (2022-03-05) - - -### Bug Fixes - -* **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#150](https://github.com/googleapis/python-memcache/issues/150)) ([fba1303](https://github.com/googleapis/python-memcache/commit/fba130344bb07512d8fc0355c2c2da158d9be8ff)) -* **deps:** require proto-plus>=1.15.0 ([fba1303](https://github.com/googleapis/python-memcache/commit/fba130344bb07512d8fc0355c2c2da158d9be8ff)) - -## [1.3.0](https://github.com/googleapis/python-memcache/compare/v1.2.1...v1.3.0) (2022-02-26) - - -### Features - -* add api key support ([#135](https://github.com/googleapis/python-memcache/issues/135)) ([ef5104e](https://github.com/googleapis/python-memcache/commit/ef5104e0922d980c0023b65665f29f27c14cddcc)) - - -### Bug Fixes - -* resolve DuplicateCredentialArgs error when using credentials_file ([5f8a2b4](https://github.com/googleapis/python-memcache/commit/5f8a2b4fe5fcc0c4a2be6b9f8529f4ceacbf6421)) - -## [1.2.1](https://www.github.com/googleapis/python-memcache/compare/v1.2.0...v1.2.1) (2021-11-01) - - -### Bug Fixes - -* **deps:** drop packaging dependency ([5159fe9](https://www.github.com/googleapis/python-memcache/commit/5159fe99b200979b54ce76633a7b8cda87931eee)) -* **deps:** require google-api-core >= 1.28.0 
([5159fe9](https://www.github.com/googleapis/python-memcache/commit/5159fe99b200979b54ce76633a7b8cda87931eee)) - - -### Documentation - -* list oneofs in docstring ([5159fe9](https://www.github.com/googleapis/python-memcache/commit/5159fe99b200979b54ce76633a7b8cda87931eee)) - -## [1.2.0](https://www.github.com/googleapis/python-memcache/compare/v1.1.3...v1.2.0) (2021-10-12) - - -### Features - -* add context manager support in client ([#111](https://www.github.com/googleapis/python-memcache/issues/111)) ([a385b99](https://www.github.com/googleapis/python-memcache/commit/a385b993b2473a01256042cc2c560f872c6b8c13)) - -## [1.1.3](https://www.github.com/googleapis/python-memcache/compare/v1.1.2...v1.1.3) (2021-09-30) - - -### Bug Fixes - -* improper types in pagers generation ([3680bac](https://www.github.com/googleapis/python-memcache/commit/3680bac8c702cc0313b06dbec3c0c6512ac4a58a)) - -## [1.1.2](https://www.github.com/googleapis/python-memcache/compare/v1.1.1...v1.1.2) (2021-09-24) - - -### Bug Fixes - -* add 'dict' annotation type to 'request' ([c56fbee](https://www.github.com/googleapis/python-memcache/commit/c56fbee0ffedac37a80bca5ca3028c53753ada5a)) - -## [1.1.1](https://www.github.com/googleapis/python-memcache/compare/v1.1.0...v1.1.1) (2021-07-26) - - -### Bug Fixes - -* **deps:** pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x versions ([#82](https://www.github.com/googleapis/python-memcache/issues/82)) ([d4f2c96](https://www.github.com/googleapis/python-memcache/commit/d4f2c965c13c28f97bda9aa8ab570529747bd68d)) -* enable self signed jwt for grpc ([#88](https://www.github.com/googleapis/python-memcache/issues/88)) ([0ddd8eb](https://www.github.com/googleapis/python-memcache/commit/0ddd8eb6c91b799d443e4d09a20adcd25d9ef70a)) - - -### Documentation - -* add Samples section to CONTRIBUTING.rst ([#83](https://www.github.com/googleapis/python-memcache/issues/83)) 
([9471485](https://www.github.com/googleapis/python-memcache/commit/94714851060def4b68ec065ae435b71ce94f41bc)) - - -### Miscellaneous Chores - -* release as 1.1.1 ([#87](https://www.github.com/googleapis/python-memcache/issues/87)) ([3182207](https://www.github.com/googleapis/python-memcache/commit/31822078c9a27c26f303f51106ccb0af587a35e4)) - -## [1.1.0](https://www.github.com/googleapis/python-memcache/compare/v1.0.0...v1.1.0) (2021-06-30) - - -### Features - -* add always_use_jwt_access ([#79](https://www.github.com/googleapis/python-memcache/issues/79)) ([e7f03bb](https://www.github.com/googleapis/python-memcache/commit/e7f03bb915eb523afcb72ec0d2dd275739f485e5)) -* support self-signed JWT flow for service accounts ([2d1aaf4](https://www.github.com/googleapis/python-memcache/commit/2d1aaf439d096857a727752ae129852b279c3658)) - - -### Bug Fixes - -* add async client to %name_%version/init.py ([2d1aaf4](https://www.github.com/googleapis/python-memcache/commit/2d1aaf439d096857a727752ae129852b279c3658)) - - -### Documentation - -* omit mention of Python 2.7 in 'CONTRIBUTING.rst' ([#1127](https://www.github.com/googleapis/python-memcache/issues/1127)) ([#70](https://www.github.com/googleapis/python-memcache/issues/70)) ([f273025](https://www.github.com/googleapis/python-memcache/commit/f273025fedad32be0b766e40ab99b445f529cd13)) - -## [1.0.0](https://www.github.com/googleapis/python-memcache/compare/v0.3.0...v1.0.0) (2021-05-28) - - -### Features - -* bump release level to production/stable ([#59](https://www.github.com/googleapis/python-memcache/issues/59)) ([b8d9394](https://www.github.com/googleapis/python-memcache/commit/b8d9394dd34b97ddd68f8c73a5f516ba5294a70c)) -* support self-signed JWT flow for service accounts ([2ad1bfb](https://www.github.com/googleapis/python-memcache/commit/2ad1bfbee1f847c1b150b0e1595faba63f42d768)) - - -### Bug Fixes - -* add async client to %name_%version/init.py 
([2ad1bfb](https://www.github.com/googleapis/python-memcache/commit/2ad1bfbee1f847c1b150b0e1595faba63f42d768)) - - -### Miscellaneous Chores - -* release 1.0.0 ([#62](https://www.github.com/googleapis/python-memcache/issues/62)) ([829a7b7](https://www.github.com/googleapis/python-memcache/commit/829a7b7b0cfedb3a18a61158d7aa949b178ae4fe)) - -## [0.3.0](https://www.github.com/googleapis/python-memcache/compare/v0.2.0...v0.3.0) (2021-02-10) - - -### Features - -* add async client ([#26](https://www.github.com/googleapis/python-memcache/issues/26)) ([0bbc337](https://www.github.com/googleapis/python-memcache/commit/0bbc337594e2a44c51a5b372670d72499592e2e0)) -* generate v1 ([#37](https://www.github.com/googleapis/python-memcache/issues/37)) ([7945daf](https://www.github.com/googleapis/python-memcache/commit/7945dafbbee1b21efc733e079044db77e880a10a)) - -## [0.2.0](https://www.github.com/googleapis/python-memcache/compare/v0.1.0...v0.2.0) (2020-05-28) - - -### Features - -* add mtls support ([#7](https://www.github.com/googleapis/python-memcache/issues/7)) ([669d2a9](https://www.github.com/googleapis/python-memcache/commit/669d2a985877971fb6c1eb0ad97806fbcfcc7399)) - -## 0.1.0 (2020-03-03) - - -### Features - -* generate v1beta2 ([8b4b6d8](https://www.github.com/googleapis/python-memcache/commit/8b4b6d888b5181deedc87436165e1ed327fe26f5)) diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md deleted file mode 100644 index 039f436..0000000 --- a/CODE_OF_CONDUCT.md +++ /dev/null @@ -1,95 +0,0 @@ - -# Code of Conduct - -## Our Pledge - -In the interest of fostering an open and welcoming environment, we as -contributors and maintainers pledge to making participation in our project and -our community a harassment-free experience for everyone, regardless of age, body -size, disability, ethnicity, gender identity and expression, level of -experience, education, socio-economic status, nationality, personal appearance, -race, religion, or sexual identity and orientation. 
- -## Our Standards - -Examples of behavior that contributes to creating a positive environment -include: - -* Using welcoming and inclusive language -* Being respectful of differing viewpoints and experiences -* Gracefully accepting constructive criticism -* Focusing on what is best for the community -* Showing empathy towards other community members - -Examples of unacceptable behavior by participants include: - -* The use of sexualized language or imagery and unwelcome sexual attention or - advances -* Trolling, insulting/derogatory comments, and personal or political attacks -* Public or private harassment -* Publishing others' private information, such as a physical or electronic - address, without explicit permission -* Other conduct which could reasonably be considered inappropriate in a - professional setting - -## Our Responsibilities - -Project maintainers are responsible for clarifying the standards of acceptable -behavior and are expected to take appropriate and fair corrective action in -response to any instances of unacceptable behavior. - -Project maintainers have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions that are -not aligned to this Code of Conduct, or to ban temporarily or permanently any -contributor for other behaviors that they deem inappropriate, threatening, -offensive, or harmful. - -## Scope - -This Code of Conduct applies both within project spaces and in public spaces -when an individual is representing the project or its community. Examples of -representing a project or community include using an official project e-mail -address, posting via an official social media account, or acting as an appointed -representative at an online or offline event. Representation of a project may be -further defined and clarified by project maintainers. 
- -This Code of Conduct also applies outside the project spaces when the Project -Steward has a reasonable belief that an individual's behavior may have a -negative impact on the project or its community. - -## Conflict Resolution - -We do not believe that all conflict is bad; healthy debate and disagreement -often yield positive results. However, it is never okay to be disrespectful or -to engage in behavior that violates the project’s code of conduct. - -If you see someone violating the code of conduct, you are encouraged to address -the behavior directly with those involved. Many issues can be resolved quickly -and easily, and this gives people more control over the outcome of their -dispute. If you are unable to resolve the matter for any reason, or if the -behavior is threatening or harassing, report it. We are dedicated to providing -an environment where participants feel welcome and safe. - - -Reports should be directed to *googleapis-stewards@google.com*, the -Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to -receive and address reported violations of the code of conduct. They will then -work with a committee consisting of representatives from the Open Source -Programs Office and the Google Open Source Strategy team. If for any reason you -are uncomfortable reaching out to the Project Steward, please email -opensource@google.com. - -We will investigate every complaint, but you may not receive a direct response. -We will use our discretion in determining when and how to follow up on reported -incidents, which may range from not taking action to permanent expulsion from -the project and project-sponsored spaces. We will notify the accused of the -report and provide them an opportunity to discuss it before any action is taken. -The identity of the reporter will be omitted from the details of the report -supplied to the accused. 
In potentially harmful situations, such as ongoing -harassment or threats to anyone's safety, we may take action without notice. - -## Attribution - -This Code of Conduct is adapted from the Contributor Covenant, version 1.4, -available at -https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst deleted file mode 100644 index 91abec0..0000000 --- a/CONTRIBUTING.rst +++ /dev/null @@ -1,281 +0,0 @@ -.. Generated by synthtool. DO NOT EDIT! -############ -Contributing -############ - -#. **Please sign one of the contributor license agreements below.** -#. Fork the repo, develop and test your code changes, add docs. -#. Make sure that your commit messages clearly describe the changes. -#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) - -.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews - -.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. - -*************** -Adding Features -*************** - -In order to add a feature: - -- The feature must be documented in both the API and narrative - documentation. - -- The feature must work fully on the following CPython versions: - 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. - -- The feature must not add unnecessary dependencies (where - "unnecessary" is of course subjective, but new dependencies should - be discussed). - -**************************** -Using a Development Checkout -**************************** - -You'll have to create a development environment using a Git checkout: - -- While logged into your GitHub account, navigate to the - ``python-memcache`` `repo`_ on GitHub. - -- Fork and clone the ``python-memcache`` repository to your GitHub account by - clicking the "Fork" button. 
- -- Clone your fork of ``python-memcache`` from your GitHub account to your local - computer, substituting your account username and specifying the destination - as ``hack-on-python-memcache``. E.g.:: - - $ cd ${HOME} - $ git clone git@github.com:USERNAME/python-memcache.git hack-on-python-memcache - $ cd hack-on-python-memcache - # Configure remotes such that you can pull changes from the googleapis/python-memcache - # repository into your local repository. - $ git remote add upstream git@github.com:googleapis/python-memcache.git - # fetch and merge changes from upstream into main - $ git fetch upstream - $ git merge upstream/main - -Now your local repo is set up such that you will push changes to your GitHub -repo, from which you can submit a pull request. - -To work on the codebase and run the tests, we recommend using ``nox``, -but you can also use a ``virtualenv`` of your own creation. - -.. _repo: https://github.com/googleapis/python-memcache - -Using ``nox`` -============= - -We use `nox `__ to instrument our tests. - -- To test your changes, run unit tests with ``nox``:: - $ nox -s unit - -- To run a single unit test:: - - $ nox -s unit-3.11 -- -k - - - .. note:: - - The unit tests and system tests are described in the - ``noxfile.py`` files in each directory. - -.. nox: https://pypi.org/project/nox/ - -***************************************** -I'm getting weird errors... Can you help? -***************************************** - -If the error mentions ``Python.h`` not being found, -install ``python-dev`` and try again. -On Debian/Ubuntu:: - - $ sudo apt-get install python-dev - -************ -Coding Style -************ -- We use the automatic code formatter ``black``. You can run it using - the nox session ``blacken``. This will eliminate many lint errors. Run via:: - - $ nox -s blacken - -- PEP8 compliance is required, with exceptions defined in the linter configuration. 
- If you have ``nox`` installed, you can test that you have not introduced - any non-compliant code via:: - - $ nox -s lint - -- In order to make ``nox -s lint`` run faster, you can set some environment - variables:: - - export GOOGLE_CLOUD_TESTING_REMOTE="upstream" - export GOOGLE_CLOUD_TESTING_BRANCH="main" - - By doing this, you are specifying the location of the most up-to-date - version of ``python-memcache``. The - remote name ``upstream`` should point to the official ``googleapis`` - checkout and the branch should be the default branch on that remote (``main``). - -- This repository contains configuration for the - `pre-commit `__ tool, which automates checking - our linters during a commit. If you have it installed on your ``$PATH``, - you can enable enforcing those checks via: - -.. code-block:: bash - - $ pre-commit install - pre-commit installed at .git/hooks/pre-commit - -Exceptions to PEP8: - -- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for - "Function-Under-Test"), which is PEP8-incompliant, but more readable. - Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). - -******************** -Running System Tests -******************** - -- To run system tests, you can execute:: - - # Run all system tests - $ nox -s system - - # Run a single system test - $ nox -s system-3.8 -- -k - - - .. note:: - - System tests are only configured to run under Python 3.8. - For expediency, we do not run them in older versions of Python 3. - - This alone will not run the tests. You'll need to change some local - auth settings and change some configuration in your project to - run all the tests. - -- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. 
- -************* -Test Coverage -************* - -- The codebase *must* have 100% test statement coverage after each commit. - You can test coverage via ``nox -s cover``. - -****************************************************** -Documentation Coverage and Building HTML Documentation -****************************************************** - -If you fix a bug, and the bug requires an API or behavior modification, all -documentation in this package which references that API or behavior must be -changed to reflect the bug fix, ideally in the same commit that fixes the bug -or adds the feature. - -Build the docs via: - - $ nox -s docs - -************************* -Samples and code snippets -************************* - -Code samples and snippets live in the `samples/` catalogue. Feel free to -provide more examples, but make sure to write tests for those examples. -Each folder containing example code requires its own `noxfile.py` script -which automates testing. If you decide to create a new folder, you can -base it on the `samples/snippets` folder (providing `noxfile.py` and -the requirements files). - -The tests will run against a real Google Cloud Project, so you should -configure them just like the System Tests. - -- To run sample tests, you can execute:: - - # Run all tests in a folder - $ cd samples/snippets - $ nox -s py-3.8 - - # Run a single sample test - $ cd samples/snippets - $ nox -s py-3.8 -- -k - -******************************************** -Note About ``README`` as it pertains to PyPI -******************************************** - -The `description on PyPI`_ for the project comes directly from the -``README``. Due to the reStructuredText (``rst``) parser used by -PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` -instead of -``https://github.com/googleapis/python-memcache/blob/main/CONTRIBUTING.rst``) -may cause problems creating links or rendering the description. - -.. 
_description on PyPI: https://pypi.org/project/google-cloud-memcache - - -************************* -Supported Python Versions -************************* - -We support: - -- `Python 3.7`_ -- `Python 3.8`_ -- `Python 3.9`_ -- `Python 3.10`_ -- `Python 3.11`_ - -.. _Python 3.7: https://docs.python.org/3.7/ -.. _Python 3.8: https://docs.python.org/3.8/ -.. _Python 3.9: https://docs.python.org/3.9/ -.. _Python 3.10: https://docs.python.org/3.10/ -.. _Python 3.11: https://docs.python.org/3.11/ - - -Supported versions can be found in our ``noxfile.py`` `config`_. - -.. _config: https://github.com/googleapis/python-memcache/blob/main/noxfile.py - - -We also explicitly decided to support Python 3 beginning with version 3.7. -Reasons for this include: - -- Encouraging use of newest versions of Python 3 -- Taking the lead of `prominent`_ open-source `projects`_ -- `Unicode literal support`_ which allows for a cleaner codebase that - works in both Python 2 and Python 3 - -.. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django -.. _projects: http://flask.pocoo.org/docs/0.10/python3/ -.. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/ - -********** -Versioning -********** - -This library follows `Semantic Versioning`_. - -.. _Semantic Versioning: http://semver.org/ - -Some packages are currently in major version zero (``0.y.z``), which means that -anything may change at any time and the public API should not be considered -stable. - -****************************** -Contributor License Agreements -****************************** - -Before we can accept your pull requests you'll need to sign a Contributor -License Agreement (CLA): - -- **If you are an individual writing original source code** and **you own the - intellectual property**, then you'll need to sign an - `individual CLA `__. 
-- **If you work for a company that wants to allow you to contribute your work**, - then you'll need to sign a - `corporate CLA `__. - -You can sign these electronically (just scroll to the bottom). After that, -we'll be able to accept your pull requests. diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index e783f4c..0000000 --- a/MANIFEST.in +++ /dev/null @@ -1,25 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -include README.rst LICENSE -recursive-include google *.json *.proto py.typed -recursive-include tests * -global-exclude *.py[co] -global-exclude __pycache__ - -# Exclude scripts for samples readmegen -prune scripts/readme-gen diff --git a/README.rst b/README.rst index 0be0c55..d14c937 100644 --- a/README.rst +++ b/README.rst @@ -1,3 +1,8 @@ +:**NOTE**: **This github repository is archived. The repository contents and history have moved to** `google-cloud-python`_. + +.. 
_google-cloud-python: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-memcache + + Python Client for Cloud Memorystore for Memcached API ===================================================== diff --git a/docs/README.rst b/docs/README.rst deleted file mode 120000 index 89a0106..0000000 --- a/docs/README.rst +++ /dev/null @@ -1 +0,0 @@ -../README.rst \ No newline at end of file diff --git a/docs/_static/custom.css b/docs/_static/custom.css deleted file mode 100644 index b0a2954..0000000 --- a/docs/_static/custom.css +++ /dev/null @@ -1,20 +0,0 @@ -div#python2-eol { - border-color: red; - border-width: medium; -} - -/* Ensure minimum width for 'Parameters' / 'Returns' column */ -dl.field-list > dt { - min-width: 100px -} - -/* Insert space between methods for readability */ -dl.method { - padding-top: 10px; - padding-bottom: 10px -} - -/* Insert empty space between classes */ -dl.class { - padding-bottom: 50px -} diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html deleted file mode 100644 index 6316a53..0000000 --- a/docs/_templates/layout.html +++ /dev/null @@ -1,50 +0,0 @@ - -{% extends "!layout.html" %} -{%- block content %} -{%- if theme_fixed_sidebar|lower == 'true' %} -
- {{ sidebar() }} - {%- block document %} -
- {%- if render_sidebar %} -
- {%- endif %} - - {%- block relbar_top %} - {%- if theme_show_relbar_top|tobool %} - - {%- endif %} - {% endblock %} - -
-
- As of January 1, 2020 this library no longer supports Python 2 on the latest released version. - Library versions released prior to that date will continue to be available. For more information please - visit Python 2 support on Google Cloud. -
- {% block body %} {% endblock %} -
- - {%- block relbar_bottom %} - {%- if theme_show_relbar_bottom|tobool %} - - {%- endif %} - {% endblock %} - - {%- if render_sidebar %} -
- {%- endif %} -
- {%- endblock %} -
-
-{%- else %} -{{ super() }} -{%- endif %} -{%- endblock %} diff --git a/docs/changelog.md b/docs/changelog.md deleted file mode 120000 index 04c99a5..0000000 --- a/docs/changelog.md +++ /dev/null @@ -1 +0,0 @@ -../CHANGELOG.md \ No newline at end of file diff --git a/docs/conf.py b/docs/conf.py deleted file mode 100644 index 1f40ca9..0000000 --- a/docs/conf.py +++ /dev/null @@ -1,384 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2021 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# google-cloud-memcache documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import os -import shlex -import sys - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -# For plugins that can not read conf.py. 
-# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 -sys.path.insert(0, os.path.abspath(".")) - -__version__ = "" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.5.5" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.doctest", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", - "recommonmark", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_options = {"members": True} -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# source_suffix = ['.rst', '.md'] -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = "google-cloud-memcache" -copyright = "2019, Google" -author = "Google APIs" - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. 
-language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = [ - "_build", - "**/.nox/**/*", - "samples/AUTHORING_GUIDE.md", - "samples/CONTRIBUTING.md", - "samples/snippets/README.rst", -] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. 
-html_theme_options = { - "description": "Google Cloud Client Libraries for google-cloud-memcache", - "github_user": "googleapis", - "github_repo": "python-memcache", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. 
-# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-memcache-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. 
- # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - #'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - #'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - #'preamble': '', - # Latex figure (float) alignment - #'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-memcache.tex", - "google-cloud-memcache Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-memcache", - "google-cloud-memcache Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. 
List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-memcache", - "google-cloud-memcache Documentation", - author, - "google-cloud-memcache", - "google-cloud-memcache Library", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("https://python.readthedocs.org/en/latest/", None), - "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), - "google.api_core": ( - "https://googleapis.dev/python/google-api-core/latest/", - None, - ), - "grpc": ("https://grpc.github.io/grpc/python/", None), - "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/docs/index.rst b/docs/index.rst deleted file mode 100644 index bf4a173..0000000 --- a/docs/index.rst +++ /dev/null @@ -1,34 +0,0 @@ -.. include:: README.rst - -.. include:: multiprocessing.rst - -This package includes clients for multiple versions of Cloud Memorystore for Memcached. -By default, you will get version ``memcache_v1``. 
- - -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - memcache_v1/services - memcache_v1/types - -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - memcache_v1beta2/services - memcache_v1beta2/types - - -Changelog ---------- - -For a list of all ``google-cloud-memcache`` releases: - -.. toctree:: - :maxdepth: 2 - - changelog diff --git a/docs/memcache_v1/cloud_memcache.rst b/docs/memcache_v1/cloud_memcache.rst deleted file mode 100644 index 35de375..0000000 --- a/docs/memcache_v1/cloud_memcache.rst +++ /dev/null @@ -1,10 +0,0 @@ -CloudMemcache -------------------------------- - -.. automodule:: google.cloud.memcache_v1.services.cloud_memcache - :members: - :inherited-members: - -.. automodule:: google.cloud.memcache_v1.services.cloud_memcache.pagers - :members: - :inherited-members: diff --git a/docs/memcache_v1/services.rst b/docs/memcache_v1/services.rst deleted file mode 100644 index 01fd6b8..0000000 --- a/docs/memcache_v1/services.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Memcache v1 API -========================================= -.. toctree:: - :maxdepth: 2 - - cloud_memcache diff --git a/docs/memcache_v1/types.rst b/docs/memcache_v1/types.rst deleted file mode 100644 index 5892ae0..0000000 --- a/docs/memcache_v1/types.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Memcache v1 API -====================================== - -.. automodule:: google.cloud.memcache_v1.types - :members: - :show-inheritance: diff --git a/docs/memcache_v1beta2/cloud_memcache.rst b/docs/memcache_v1beta2/cloud_memcache.rst deleted file mode 100644 index 7dc7a82..0000000 --- a/docs/memcache_v1beta2/cloud_memcache.rst +++ /dev/null @@ -1,10 +0,0 @@ -CloudMemcache -------------------------------- - -.. automodule:: google.cloud.memcache_v1beta2.services.cloud_memcache - :members: - :inherited-members: - -.. 
automodule:: google.cloud.memcache_v1beta2.services.cloud_memcache.pagers - :members: - :inherited-members: diff --git a/docs/memcache_v1beta2/services.rst b/docs/memcache_v1beta2/services.rst deleted file mode 100644 index e5faef5..0000000 --- a/docs/memcache_v1beta2/services.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Memcache v1beta2 API -============================================== -.. toctree:: - :maxdepth: 2 - - cloud_memcache diff --git a/docs/memcache_v1beta2/types.rst b/docs/memcache_v1beta2/types.rst deleted file mode 100644 index 1b47aa6..0000000 --- a/docs/memcache_v1beta2/types.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Memcache v1beta2 API -=========================================== - -.. automodule:: google.cloud.memcache_v1beta2.types - :members: - :show-inheritance: diff --git a/docs/multiprocessing.rst b/docs/multiprocessing.rst deleted file mode 100644 index 536d17b..0000000 --- a/docs/multiprocessing.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. note:: - - Because this client uses :mod:`grpc` library, it is safe to - share instances across threads. In multiprocessing scenarios, the best - practice is to create client instances *after* the invocation of - :func:`os.fork` by :class:`multiprocessing.pool.Pool` or - :class:`multiprocessing.Process`. diff --git a/google/cloud/memcache/__init__.py b/google/cloud/memcache/__init__.py deleted file mode 100644 index 8865b97..0000000 --- a/google/cloud/memcache/__init__.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.memcache import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.memcache_v1.services.cloud_memcache.async_client import ( - CloudMemcacheAsyncClient, -) -from google.cloud.memcache_v1.services.cloud_memcache.client import CloudMemcacheClient -from google.cloud.memcache_v1.types.cloud_memcache import ( - ApplyParametersRequest, - CreateInstanceRequest, - DeleteInstanceRequest, - GetInstanceRequest, - Instance, - ListInstancesRequest, - ListInstancesResponse, - LocationMetadata, - MaintenancePolicy, - MaintenanceSchedule, - MemcacheParameters, - MemcacheVersion, - OperationMetadata, - RescheduleMaintenanceRequest, - UpdateInstanceRequest, - UpdateParametersRequest, - WeeklyMaintenanceWindow, - ZoneMetadata, -) - -__all__ = ( - "CloudMemcacheClient", - "CloudMemcacheAsyncClient", - "ApplyParametersRequest", - "CreateInstanceRequest", - "DeleteInstanceRequest", - "GetInstanceRequest", - "Instance", - "ListInstancesRequest", - "ListInstancesResponse", - "LocationMetadata", - "MaintenancePolicy", - "MaintenanceSchedule", - "MemcacheParameters", - "OperationMetadata", - "RescheduleMaintenanceRequest", - "UpdateInstanceRequest", - "UpdateParametersRequest", - "WeeklyMaintenanceWindow", - "ZoneMetadata", - "MemcacheVersion", -) diff --git a/google/cloud/memcache/gapic_version.py b/google/cloud/memcache/gapic_version.py deleted file mode 100644 index 84856f0..0000000 --- a/google/cloud/memcache/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google 
LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "1.7.1" # {x-release-please-version} diff --git a/google/cloud/memcache/py.typed b/google/cloud/memcache/py.typed deleted file mode 100644 index 7959cf4..0000000 --- a/google/cloud/memcache/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-memcache package uses inline types. diff --git a/google/cloud/memcache_v1/__init__.py b/google/cloud/memcache_v1/__init__.py deleted file mode 100644 index cbaf61d..0000000 --- a/google/cloud/memcache_v1/__init__.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.memcache_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.cloud_memcache import CloudMemcacheAsyncClient, CloudMemcacheClient -from .types.cloud_memcache import ( - ApplyParametersRequest, - CreateInstanceRequest, - DeleteInstanceRequest, - GetInstanceRequest, - Instance, - ListInstancesRequest, - ListInstancesResponse, - LocationMetadata, - MaintenancePolicy, - MaintenanceSchedule, - MemcacheParameters, - MemcacheVersion, - OperationMetadata, - RescheduleMaintenanceRequest, - UpdateInstanceRequest, - UpdateParametersRequest, - WeeklyMaintenanceWindow, - ZoneMetadata, -) - -__all__ = ( - "CloudMemcacheAsyncClient", - "ApplyParametersRequest", - "CloudMemcacheClient", - "CreateInstanceRequest", - "DeleteInstanceRequest", - "GetInstanceRequest", - "Instance", - "ListInstancesRequest", - "ListInstancesResponse", - "LocationMetadata", - "MaintenancePolicy", - "MaintenanceSchedule", - "MemcacheParameters", - "MemcacheVersion", - "OperationMetadata", - "RescheduleMaintenanceRequest", - "UpdateInstanceRequest", - "UpdateParametersRequest", - "WeeklyMaintenanceWindow", - "ZoneMetadata", -) diff --git a/google/cloud/memcache_v1/gapic_metadata.json b/google/cloud/memcache_v1/gapic_metadata.json deleted file mode 100644 index 2c6ccb4..0000000 --- a/google/cloud/memcache_v1/gapic_metadata.json +++ /dev/null @@ -1,148 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.memcache_v1", - "protoPackage": "google.cloud.memcache.v1", - "schema": "1.0", - "services": { - "CloudMemcache": { - "clients": { - "grpc": { - "libraryClient": "CloudMemcacheClient", - "rpcs": { - "ApplyParameters": { - "methods": [ - "apply_parameters" - ] - }, - "CreateInstance": { - "methods": [ - "create_instance" - ] - }, - "DeleteInstance": { - "methods": [ - "delete_instance" - ] - }, - "GetInstance": { - 
"methods": [ - "get_instance" - ] - }, - "ListInstances": { - "methods": [ - "list_instances" - ] - }, - "RescheduleMaintenance": { - "methods": [ - "reschedule_maintenance" - ] - }, - "UpdateInstance": { - "methods": [ - "update_instance" - ] - }, - "UpdateParameters": { - "methods": [ - "update_parameters" - ] - } - } - }, - "grpc-async": { - "libraryClient": "CloudMemcacheAsyncClient", - "rpcs": { - "ApplyParameters": { - "methods": [ - "apply_parameters" - ] - }, - "CreateInstance": { - "methods": [ - "create_instance" - ] - }, - "DeleteInstance": { - "methods": [ - "delete_instance" - ] - }, - "GetInstance": { - "methods": [ - "get_instance" - ] - }, - "ListInstances": { - "methods": [ - "list_instances" - ] - }, - "RescheduleMaintenance": { - "methods": [ - "reschedule_maintenance" - ] - }, - "UpdateInstance": { - "methods": [ - "update_instance" - ] - }, - "UpdateParameters": { - "methods": [ - "update_parameters" - ] - } - } - }, - "rest": { - "libraryClient": "CloudMemcacheClient", - "rpcs": { - "ApplyParameters": { - "methods": [ - "apply_parameters" - ] - }, - "CreateInstance": { - "methods": [ - "create_instance" - ] - }, - "DeleteInstance": { - "methods": [ - "delete_instance" - ] - }, - "GetInstance": { - "methods": [ - "get_instance" - ] - }, - "ListInstances": { - "methods": [ - "list_instances" - ] - }, - "RescheduleMaintenance": { - "methods": [ - "reschedule_maintenance" - ] - }, - "UpdateInstance": { - "methods": [ - "update_instance" - ] - }, - "UpdateParameters": { - "methods": [ - "update_parameters" - ] - } - } - } - } - } - } -} diff --git a/google/cloud/memcache_v1/gapic_version.py b/google/cloud/memcache_v1/gapic_version.py deleted file mode 100644 index 84856f0..0000000 --- a/google/cloud/memcache_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "1.7.1" # {x-release-please-version} diff --git a/google/cloud/memcache_v1/py.typed b/google/cloud/memcache_v1/py.typed deleted file mode 100644 index 7959cf4..0000000 --- a/google/cloud/memcache_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-memcache package uses inline types. diff --git a/google/cloud/memcache_v1/services/__init__.py b/google/cloud/memcache_v1/services/__init__.py deleted file mode 100644 index e8e1c38..0000000 --- a/google/cloud/memcache_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/google/cloud/memcache_v1/services/cloud_memcache/__init__.py b/google/cloud/memcache_v1/services/cloud_memcache/__init__.py deleted file mode 100644 index 61c41a1..0000000 --- a/google/cloud/memcache_v1/services/cloud_memcache/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .async_client import CloudMemcacheAsyncClient -from .client import CloudMemcacheClient - -__all__ = ( - "CloudMemcacheClient", - "CloudMemcacheAsyncClient", -) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py deleted file mode 100644 index 4be06f5..0000000 --- a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py +++ /dev/null @@ -1,1644 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import functools -import re -from typing import ( - Dict, - Mapping, - MutableMapping, - MutableSequence, - Optional, - Sequence, - Tuple, - Type, - Union, -) - -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core.client_options import ClientOptions -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.memcache_v1 import gapic_version as package_version - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - -from google.cloud.memcache_v1.services.cloud_memcache import pagers -from google.cloud.memcache_v1.types import cloud_memcache - -from .client import CloudMemcacheClient -from .transports.base import DEFAULT_CLIENT_INFO, CloudMemcacheTransport -from .transports.grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport - - -class CloudMemcacheAsyncClient: - """Configures and manages Cloud Memorystore for Memcached instances. 
- - The ``memcache.googleapis.com`` service implements the Google Cloud - Memorystore for Memcached API and defines the following resource - model for managing Memorystore Memcached (also called Memcached - below) instances: - - - The service works with a collection of cloud projects, named: - ``/projects/*`` - - Each project has a collection of available locations, named: - ``/locations/*`` - - Each location has a collection of Memcached instances, named: - ``/instances/*`` - - As such, Memcached instances are resources of the form: - ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - - Note that location_id must be a GCP ``region``; for example: - - - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached`` - """ - - _client: CloudMemcacheClient - - DEFAULT_ENDPOINT = CloudMemcacheClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = CloudMemcacheClient.DEFAULT_MTLS_ENDPOINT - - instance_path = staticmethod(CloudMemcacheClient.instance_path) - parse_instance_path = staticmethod(CloudMemcacheClient.parse_instance_path) - common_billing_account_path = staticmethod( - CloudMemcacheClient.common_billing_account_path - ) - parse_common_billing_account_path = staticmethod( - CloudMemcacheClient.parse_common_billing_account_path - ) - common_folder_path = staticmethod(CloudMemcacheClient.common_folder_path) - parse_common_folder_path = staticmethod( - CloudMemcacheClient.parse_common_folder_path - ) - common_organization_path = staticmethod( - CloudMemcacheClient.common_organization_path - ) - parse_common_organization_path = staticmethod( - CloudMemcacheClient.parse_common_organization_path - ) - common_project_path = staticmethod(CloudMemcacheClient.common_project_path) - parse_common_project_path = staticmethod( - CloudMemcacheClient.parse_common_project_path - ) - common_location_path = staticmethod(CloudMemcacheClient.common_location_path) - parse_common_location_path = staticmethod( - 
CloudMemcacheClient.parse_common_location_path - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudMemcacheAsyncClient: The constructed client. - """ - return CloudMemcacheClient.from_service_account_info.__func__(CloudMemcacheAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudMemcacheAsyncClient: The constructed client. - """ - return CloudMemcacheClient.from_service_account_file.__func__(CloudMemcacheAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source( - cls, client_options: Optional[ClientOptions] = None - ): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
- (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return CloudMemcacheClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> CloudMemcacheTransport: - """Returns the transport used by the client instance. - - Returns: - CloudMemcacheTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial( - type(CloudMemcacheClient).get_transport_class, type(CloudMemcacheClient) - ) - - def __init__( - self, - *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, CloudMemcacheTransport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the cloud memcache client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.CloudMemcacheTransport]): The - transport to use. If set to None, a transport is chosen - automatically. 
- client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = CloudMemcacheClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - ) - - async def list_instances( - self, - request: Optional[Union[cloud_memcache.ListInstancesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListInstancesAsyncPager: - r"""Lists Instances in a given location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1 - - async def sample_list_instances(): - # Create a client - client = memcache_v1.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - request = memcache_v1.ListInstancesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instances(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.memcache_v1.types.ListInstancesRequest, dict]]): - The request object. Request for - [ListInstances][google.cloud.memcache.v1.CloudMemcache.ListInstances]. - parent (:class:`str`): - Required. The resource name of the instance location - using the form: - ``projects/{project_id}/locations/{location_id}`` where - ``location_id`` refers to a GCP region - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.memcache_v1.services.cloud_memcache.pagers.ListInstancesAsyncPager: - Response for - [ListInstances][google.cloud.memcache.v1.CloudMemcache.ListInstances]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = cloud_memcache.ListInstancesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_instances, - default_timeout=1200.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListInstancesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_instance( - self, - request: Optional[Union[cloud_memcache.GetInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloud_memcache.Instance: - r"""Gets details of a single Instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1 - - async def sample_get_instance(): - # Create a client - client = memcache_v1.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - request = memcache_v1.GetInstanceRequest( - name="name_value", - ) - - # Make the request - response = await client.get_instance(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.memcache_v1.types.GetInstanceRequest, dict]]): - The request object. Request for - [GetInstance][google.cloud.memcache.v1.CloudMemcache.GetInstance]. - name (:class:`str`): - Required. Memcached instance resource name in the - format: - ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - where ``location_id`` refers to a GCP region - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.memcache_v1.types.Instance: - A Memorystore for Memcached instance - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = cloud_memcache.GetInstanceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_instance, - default_timeout=1200.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_instance( - self, - request: Optional[Union[cloud_memcache.CreateInstanceRequest, dict]] = None, - *, - parent: Optional[str] = None, - instance: Optional[cloud_memcache.Instance] = None, - instance_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a new Instance in a given location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1 - - async def sample_create_instance(): - # Create a client - client = memcache_v1.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - instance = memcache_v1.Instance() - instance.name = "name_value" - instance.node_count = 1070 - instance.node_config.cpu_count = 976 - instance.node_config.memory_size_mb = 1505 - - request = memcache_v1.CreateInstanceRequest( - parent="parent_value", - instance_id="instance_id_value", - instance=instance, - ) - - # Make the request - operation = client.create_instance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.memcache_v1.types.CreateInstanceRequest, dict]]): - The request object. Request for - [CreateInstance][google.cloud.memcache.v1.CloudMemcache.CreateInstance]. - parent (:class:`str`): - Required. The resource name of the instance location - using the form: - ``projects/{project_id}/locations/{location_id}`` where - ``location_id`` refers to a GCP region - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - instance (:class:`google.cloud.memcache_v1.types.Instance`): - Required. A Memcached Instance - This corresponds to the ``instance`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - instance_id (:class:`str`): - Required. The logical name of the Memcached instance in - the user project with the following restrictions: - - - Must contain only lowercase letters, numbers, and - hyphens. - - Must start with a letter. - - Must be between 1-40 characters. - - Must end with a number or a letter. 
- - Must be unique within the user project / location. - - If any of the above are not met, the API raises an - invalid argument error. - - This corresponds to the ``instance_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.memcache_v1.types.Instance` A - Memorystore for Memcached instance - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, instance, instance_id]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = cloud_memcache.CreateInstanceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if instance is not None: - request.instance = instance - if instance_id is not None: - request.instance_id = instance_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_instance, - default_timeout=1200.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - cloud_memcache.Instance, - metadata_type=cloud_memcache.OperationMetadata, - ) - - # Done; return the response. - return response - - async def update_instance( - self, - request: Optional[Union[cloud_memcache.UpdateInstanceRequest, dict]] = None, - *, - instance: Optional[cloud_memcache.Instance] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates an existing Instance in a given project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1 - - async def sample_update_instance(): - # Create a client - client = memcache_v1.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - instance = memcache_v1.Instance() - instance.name = "name_value" - instance.node_count = 1070 - instance.node_config.cpu_count = 976 - instance.node_config.memory_size_mb = 1505 - - request = memcache_v1.UpdateInstanceRequest( - instance=instance, - ) - - # Make the request - operation = client.update_instance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.memcache_v1.types.UpdateInstanceRequest, dict]]): - The request object. Request for - [UpdateInstance][google.cloud.memcache.v1.CloudMemcache.UpdateInstance]. - instance (:class:`google.cloud.memcache_v1.types.Instance`): - Required. A Memcached Instance. Only fields specified in - update_mask are updated. - - This corresponds to the ``instance`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. - - - ``displayName`` - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. 
- - The result type for the operation will be - :class:`google.cloud.memcache_v1.types.Instance` A - Memorystore for Memcached instance - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([instance, update_mask]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = cloud_memcache.UpdateInstanceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if instance is not None: - request.instance = instance - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_instance, - default_timeout=1200.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("instance.name", request.instance.name),) - ), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - cloud_memcache.Instance, - metadata_type=cloud_memcache.OperationMetadata, - ) - - # Done; return the response. 
- return response - - async def update_parameters( - self, - request: Optional[Union[cloud_memcache.UpdateParametersRequest, dict]] = None, - *, - name: Optional[str] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - parameters: Optional[cloud_memcache.MemcacheParameters] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates the defined Memcached parameters for an existing - instance. This method only stages the parameters, it must be - followed by ``ApplyParameters`` to apply the parameters to nodes - of the Memcached instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1 - - async def sample_update_parameters(): - # Create a client - client = memcache_v1.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - request = memcache_v1.UpdateParametersRequest( - name="name_value", - ) - - # Make the request - operation = client.update_parameters(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.memcache_v1.types.UpdateParametersRequest, dict]]): - The request object. Request for - [UpdateParameters][google.cloud.memcache.v1.CloudMemcache.UpdateParameters]. - name (:class:`str`): - Required. Resource name of the - Memcached instance for which the - parameters should be updated. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - parameters (:class:`google.cloud.memcache_v1.types.MemcacheParameters`): - The parameters to apply to the - instance. - - This corresponds to the ``parameters`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.memcache_v1.types.Instance` A - Memorystore for Memcached instance - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, update_mask, parameters]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = cloud_memcache.UpdateParametersRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if update_mask is not None: - request.update_mask = update_mask - if parameters is not None: - request.parameters = parameters - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_parameters, - default_timeout=1200.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - cloud_memcache.Instance, - metadata_type=cloud_memcache.OperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_instance( - self, - request: Optional[Union[cloud_memcache.DeleteInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a single Instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1 - - async def sample_delete_instance(): - # Create a client - client = memcache_v1.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - request = memcache_v1.DeleteInstanceRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_instance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.memcache_v1.types.DeleteInstanceRequest, dict]]): - The request object. Request for - [DeleteInstance][google.cloud.memcache.v1.CloudMemcache.DeleteInstance]. - name (:class:`str`): - Required. Memcached instance resource name in the - format: - ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - where ``location_id`` refers to a GCP region - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. 
For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = cloud_memcache.DeleteInstanceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_instance, - default_timeout=1200.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=cloud_memcache.OperationMetadata, - ) - - # Done; return the response. 
- return response - - async def apply_parameters( - self, - request: Optional[Union[cloud_memcache.ApplyParametersRequest, dict]] = None, - *, - name: Optional[str] = None, - node_ids: Optional[MutableSequence[str]] = None, - apply_all: Optional[bool] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""``ApplyParameters`` restarts the set of specified nodes in order - to update them to the current set of parameters for the - Memcached Instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1 - - async def sample_apply_parameters(): - # Create a client - client = memcache_v1.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - request = memcache_v1.ApplyParametersRequest( - name="name_value", - ) - - # Make the request - operation = client.apply_parameters(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.memcache_v1.types.ApplyParametersRequest, dict]]): - The request object. Request for - [ApplyParameters][google.cloud.memcache.v1.CloudMemcache.ApplyParameters]. - name (:class:`str`): - Required. Resource name of the - Memcached instance for which parameter - group updates should be applied. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- node_ids (:class:`MutableSequence[str]`): - Nodes to which the instance-level - parameter group is applied. - - This corresponds to the ``node_ids`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - apply_all (:class:`bool`): - Whether to apply instance-level parameter group to all - nodes. If set to true, users are restricted from - specifying individual nodes, and ``ApplyParameters`` - updates all nodes within the instance. - - This corresponds to the ``apply_all`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.memcache_v1.types.Instance` A - Memorystore for Memcached instance - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, node_ids, apply_all]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = cloud_memcache.ApplyParametersRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if apply_all is not None: - request.apply_all = apply_all - if node_ids: - request.node_ids.extend(node_ids) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.apply_parameters, - default_timeout=1200.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - cloud_memcache.Instance, - metadata_type=cloud_memcache.OperationMetadata, - ) - - # Done; return the response. - return response - - async def reschedule_maintenance( - self, - request: Optional[ - Union[cloud_memcache.RescheduleMaintenanceRequest, dict] - ] = None, - *, - instance: Optional[str] = None, - reschedule_type: Optional[ - cloud_memcache.RescheduleMaintenanceRequest.RescheduleType - ] = None, - schedule_time: Optional[timestamp_pb2.Timestamp] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Reschedules upcoming maintenance event. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1 - - async def sample_reschedule_maintenance(): - # Create a client - client = memcache_v1.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - request = memcache_v1.RescheduleMaintenanceRequest( - instance="instance_value", - reschedule_type="SPECIFIC_TIME", - ) - - # Make the request - operation = client.reschedule_maintenance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.memcache_v1.types.RescheduleMaintenanceRequest, dict]]): - The request object. Request for - [RescheduleMaintenance][google.cloud.memcache.v1.CloudMemcache.RescheduleMaintenance]. - instance (:class:`str`): - Required. Memcache instance resource name using the - form: - ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - where ``location_id`` refers to a GCP region. - - This corresponds to the ``instance`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - reschedule_type (:class:`google.cloud.memcache_v1.types.RescheduleMaintenanceRequest.RescheduleType`): - Required. If reschedule type is SPECIFIC_TIME, must set - up schedule_time as well. - - This corresponds to the ``reschedule_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - schedule_time (:class:`google.protobuf.timestamp_pb2.Timestamp`): - Timestamp when the maintenance shall be rescheduled to - if reschedule_type=SPECIFIC_TIME, in RFC 3339 format, - for example ``2012-11-15T16:19:00.094Z``. - - This corresponds to the ``schedule_time`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.memcache_v1.types.Instance` A - Memorystore for Memcached instance - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([instance, reschedule_type, schedule_time]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = cloud_memcache.RescheduleMaintenanceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if instance is not None: - request.instance = instance - if reschedule_type is not None: - request.reschedule_type = reschedule_type - if schedule_time is not None: - request.schedule_time = schedule_time - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.reschedule_maintenance, - default_timeout=1200.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("instance", request.instance),)), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
- response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - cloud_memcache.Instance, - metadata_type=cloud_memcache.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. 
- - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. 
- - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.Location: - Location object. 
- """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.get_location, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.list_locations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=package_version.__version__ -) - - -__all__ = ("CloudMemcacheAsyncClient",) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py deleted file mode 100644 index 65dd566..0000000 --- a/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ /dev/null @@ -1,1878 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import os -import re -from typing import ( - Dict, - Mapping, - MutableMapping, - MutableSequence, - Optional, - Sequence, - Tuple, - Type, - Union, - cast, -) - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.memcache_v1 import gapic_version as package_version - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - -from google.cloud.memcache_v1.services.cloud_memcache import pagers -from google.cloud.memcache_v1.types import cloud_memcache - -from .transports.base import DEFAULT_CLIENT_INFO, CloudMemcacheTransport -from .transports.grpc import CloudMemcacheGrpcTransport -from .transports.grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport -from .transports.rest import CloudMemcacheRestTransport - - -class CloudMemcacheClientMeta(type): - """Metaclass for the CloudMemcache client. - - This provides class-level methods for building and retrieving - support objects (e.g. 
transport) without polluting the client instance - objects. - """ - - _transport_registry = OrderedDict() # type: Dict[str, Type[CloudMemcacheTransport]] - _transport_registry["grpc"] = CloudMemcacheGrpcTransport - _transport_registry["grpc_asyncio"] = CloudMemcacheGrpcAsyncIOTransport - _transport_registry["rest"] = CloudMemcacheRestTransport - - def get_transport_class( - cls, - label: Optional[str] = None, - ) -> Type[CloudMemcacheTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class CloudMemcacheClient(metaclass=CloudMemcacheClientMeta): - """Configures and manages Cloud Memorystore for Memcached instances. - - The ``memcache.googleapis.com`` service implements the Google Cloud - Memorystore for Memcached API and defines the following resource - model for managing Memorystore Memcached (also called Memcached - below) instances: - - - The service works with a collection of cloud projects, named: - ``/projects/*`` - - Each project has a collection of available locations, named: - ``/locations/*`` - - Each location has a collection of Memcached instances, named: - ``/instances/*`` - - As such, Memcached instances are resources of the form: - ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - - Note that location_id must be a GCP ``region``; for example: - - - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached`` - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. 
- - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "memcache.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudMemcacheClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudMemcacheClient: The constructed client. 
- """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> CloudMemcacheTransport: - """Returns the transport used by the client instance. - - Returns: - CloudMemcacheTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def instance_path( - project: str, - location: str, - instance: str, - ) -> str: - """Returns a fully-qualified instance string.""" - return "projects/{project}/locations/{location}/instances/{instance}".format( - project=project, - location=location, - instance=instance, - ) - - @staticmethod - def parse_instance_path(path: str) -> Dict[str, str]: - """Parses a instance path into its component segments.""" - m = re.match( - r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$", - path, - ) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path( - billing_account: str, - ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str, str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path( - folder: str, - ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format( - folder=folder, - ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str, str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path( - organization: str, - ) -> str: - """Returns a fully-qualified 
organization string.""" - return "organizations/{organization}".format( - organization=organization, - ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str, str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path( - project: str, - ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format( - project=project, - ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str, str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path( - project: str, - location: str, - ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str, str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source( - cls, client_options: Optional[client_options_lib.ClientOptions] = None - ): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
- (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError( - "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - ) - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. 
- if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or ( - use_mtls_endpoint == "auto" and client_cert_source - ): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__( - self, - *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, CloudMemcacheTransport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the cloud memcache client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, CloudMemcacheTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. 
If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( - client_options - ) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError( - "client_options.api_key and credentials are mutually exclusive" - ) - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, CloudMemcacheTransport): - # transport is a CloudMemcacheTransport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError( - "When providing a transport instance, " - "provide its credentials directly." - ) - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr( - google.auth._default, "get_api_key_credentials" - ): - credentials = google.auth._default.get_api_key_credentials( - api_key_value - ) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def list_instances( - self, - request: Optional[Union[cloud_memcache.ListInstancesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListInstancesPager: - r"""Lists Instances in a given location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1 - - def sample_list_instances(): - # Create a client - client = memcache_v1.CloudMemcacheClient() - - # Initialize request argument(s) - request = memcache_v1.ListInstancesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instances(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.memcache_v1.types.ListInstancesRequest, dict]): - The request object. Request for - [ListInstances][google.cloud.memcache.v1.CloudMemcache.ListInstances]. - parent (str): - Required. The resource name of the instance location - using the form: - ``projects/{project_id}/locations/{location_id}`` where - ``location_id`` refers to a GCP region - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.memcache_v1.services.cloud_memcache.pagers.ListInstancesPager: - Response for - [ListInstances][google.cloud.memcache.v1.CloudMemcache.ListInstances]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # Minor optimization to avoid making a copy if the user passes - # in a cloud_memcache.ListInstancesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloud_memcache.ListInstancesRequest): - request = cloud_memcache.ListInstancesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_instances] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListInstancesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_instance( - self, - request: Optional[Union[cloud_memcache.GetInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloud_memcache.Instance: - r"""Gets details of a single Instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1 - - def sample_get_instance(): - # Create a client - client = memcache_v1.CloudMemcacheClient() - - # Initialize request argument(s) - request = memcache_v1.GetInstanceRequest( - name="name_value", - ) - - # Make the request - response = client.get_instance(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.memcache_v1.types.GetInstanceRequest, dict]): - The request object. Request for - [GetInstance][google.cloud.memcache.v1.CloudMemcache.GetInstance]. - name (str): - Required. Memcached instance resource name in the - format: - ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - where ``location_id`` refers to a GCP region - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.memcache_v1.types.Instance: - A Memorystore for Memcached instance - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." 
- ) - - # Minor optimization to avoid making a copy if the user passes - # in a cloud_memcache.GetInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloud_memcache.GetInstanceRequest): - request = cloud_memcache.GetInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_instance( - self, - request: Optional[Union[cloud_memcache.CreateInstanceRequest, dict]] = None, - *, - parent: Optional[str] = None, - instance: Optional[cloud_memcache.Instance] = None, - instance_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates a new Instance in a given location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1 - - def sample_create_instance(): - # Create a client - client = memcache_v1.CloudMemcacheClient() - - # Initialize request argument(s) - instance = memcache_v1.Instance() - instance.name = "name_value" - instance.node_count = 1070 - instance.node_config.cpu_count = 976 - instance.node_config.memory_size_mb = 1505 - - request = memcache_v1.CreateInstanceRequest( - parent="parent_value", - instance_id="instance_id_value", - instance=instance, - ) - - # Make the request - operation = client.create_instance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.memcache_v1.types.CreateInstanceRequest, dict]): - The request object. Request for - [CreateInstance][google.cloud.memcache.v1.CloudMemcache.CreateInstance]. - parent (str): - Required. The resource name of the instance location - using the form: - ``projects/{project_id}/locations/{location_id}`` where - ``location_id`` refers to a GCP region - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - instance (google.cloud.memcache_v1.types.Instance): - Required. A Memcached Instance - This corresponds to the ``instance`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - instance_id (str): - Required. The logical name of the Memcached instance in - the user project with the following restrictions: - - - Must contain only lowercase letters, numbers, and - hyphens. - - Must start with a letter. - - Must be between 1-40 characters. - - Must end with a number or a letter. - - Must be unique within the user project / location. 
- - If any of the above are not met, the API raises an - invalid argument error. - - This corresponds to the ``instance_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.memcache_v1.types.Instance` A - Memorystore for Memcached instance - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, instance, instance_id]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # Minor optimization to avoid making a copy if the user passes - # in a cloud_memcache.CreateInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloud_memcache.CreateInstanceRequest): - request = cloud_memcache.CreateInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if instance is not None: - request.instance = instance - if instance_id is not None: - request.instance_id = instance_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_instance] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - cloud_memcache.Instance, - metadata_type=cloud_memcache.OperationMetadata, - ) - - # Done; return the response. - return response - - def update_instance( - self, - request: Optional[Union[cloud_memcache.UpdateInstanceRequest, dict]] = None, - *, - instance: Optional[cloud_memcache.Instance] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Updates an existing Instance in a given project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1 - - def sample_update_instance(): - # Create a client - client = memcache_v1.CloudMemcacheClient() - - # Initialize request argument(s) - instance = memcache_v1.Instance() - instance.name = "name_value" - instance.node_count = 1070 - instance.node_config.cpu_count = 976 - instance.node_config.memory_size_mb = 1505 - - request = memcache_v1.UpdateInstanceRequest( - instance=instance, - ) - - # Make the request - operation = client.update_instance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.memcache_v1.types.UpdateInstanceRequest, dict]): - The request object. Request for - [UpdateInstance][google.cloud.memcache.v1.CloudMemcache.UpdateInstance]. - instance (google.cloud.memcache_v1.types.Instance): - Required. A Memcached Instance. Only fields specified in - update_mask are updated. - - This corresponds to the ``instance`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - - - ``displayName`` - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. 
- - The result type for the operation will be - :class:`google.cloud.memcache_v1.types.Instance` A - Memorystore for Memcached instance - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([instance, update_mask]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # Minor optimization to avoid making a copy if the user passes - # in a cloud_memcache.UpdateInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloud_memcache.UpdateInstanceRequest): - request = cloud_memcache.UpdateInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if instance is not None: - request.instance = instance - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("instance.name", request.instance.name),) - ), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - cloud_memcache.Instance, - metadata_type=cloud_memcache.OperationMetadata, - ) - - # Done; return the response. 
- return response - - def update_parameters( - self, - request: Optional[Union[cloud_memcache.UpdateParametersRequest, dict]] = None, - *, - name: Optional[str] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - parameters: Optional[cloud_memcache.MemcacheParameters] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Updates the defined Memcached parameters for an existing - instance. This method only stages the parameters, it must be - followed by ``ApplyParameters`` to apply the parameters to nodes - of the Memcached instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1 - - def sample_update_parameters(): - # Create a client - client = memcache_v1.CloudMemcacheClient() - - # Initialize request argument(s) - request = memcache_v1.UpdateParametersRequest( - name="name_value", - ) - - # Make the request - operation = client.update_parameters(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.memcache_v1.types.UpdateParametersRequest, dict]): - The request object. Request for - [UpdateParameters][google.cloud.memcache.v1.CloudMemcache.UpdateParameters]. - name (str): - Required. Resource name of the - Memcached instance for which the - parameters should be updated. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - parameters (google.cloud.memcache_v1.types.MemcacheParameters): - The parameters to apply to the - instance. - - This corresponds to the ``parameters`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.memcache_v1.types.Instance` A - Memorystore for Memcached instance - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, update_mask, parameters]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # Minor optimization to avoid making a copy if the user passes - # in a cloud_memcache.UpdateParametersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloud_memcache.UpdateParametersRequest): - request = cloud_memcache.UpdateParametersRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - if update_mask is not None: - request.update_mask = update_mask - if parameters is not None: - request.parameters = parameters - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_parameters] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - cloud_memcache.Instance, - metadata_type=cloud_memcache.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_instance( - self, - request: Optional[Union[cloud_memcache.DeleteInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Deletes a single Instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1 - - def sample_delete_instance(): - # Create a client - client = memcache_v1.CloudMemcacheClient() - - # Initialize request argument(s) - request = memcache_v1.DeleteInstanceRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_instance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.memcache_v1.types.DeleteInstanceRequest, dict]): - The request object. Request for - [DeleteInstance][google.cloud.memcache.v1.CloudMemcache.DeleteInstance]. - name (str): - Required. Memcached instance resource name in the - format: - ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - where ``location_id`` refers to a GCP region - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # Minor optimization to avoid making a copy if the user passes - # in a cloud_memcache.DeleteInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloud_memcache.DeleteInstanceRequest): - request = cloud_memcache.DeleteInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=cloud_memcache.OperationMetadata, - ) - - # Done; return the response. 
- return response - - def apply_parameters( - self, - request: Optional[Union[cloud_memcache.ApplyParametersRequest, dict]] = None, - *, - name: Optional[str] = None, - node_ids: Optional[MutableSequence[str]] = None, - apply_all: Optional[bool] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""``ApplyParameters`` restarts the set of specified nodes in order - to update them to the current set of parameters for the - Memcached Instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1 - - def sample_apply_parameters(): - # Create a client - client = memcache_v1.CloudMemcacheClient() - - # Initialize request argument(s) - request = memcache_v1.ApplyParametersRequest( - name="name_value", - ) - - # Make the request - operation = client.apply_parameters(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.memcache_v1.types.ApplyParametersRequest, dict]): - The request object. Request for - [ApplyParameters][google.cloud.memcache.v1.CloudMemcache.ApplyParameters]. - name (str): - Required. Resource name of the - Memcached instance for which parameter - group updates should be applied. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- node_ids (MutableSequence[str]): - Nodes to which the instance-level - parameter group is applied. - - This corresponds to the ``node_ids`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - apply_all (bool): - Whether to apply instance-level parameter group to all - nodes. If set to true, users are restricted from - specifying individual nodes, and ``ApplyParameters`` - updates all nodes within the instance. - - This corresponds to the ``apply_all`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.memcache_v1.types.Instance` A - Memorystore for Memcached instance - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, node_ids, apply_all]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # Minor optimization to avoid making a copy if the user passes - # in a cloud_memcache.ApplyParametersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloud_memcache.ApplyParametersRequest): - request = cloud_memcache.ApplyParametersRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - if node_ids is not None: - request.node_ids = node_ids - if apply_all is not None: - request.apply_all = apply_all - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.apply_parameters] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - cloud_memcache.Instance, - metadata_type=cloud_memcache.OperationMetadata, - ) - - # Done; return the response. - return response - - def reschedule_maintenance( - self, - request: Optional[ - Union[cloud_memcache.RescheduleMaintenanceRequest, dict] - ] = None, - *, - instance: Optional[str] = None, - reschedule_type: Optional[ - cloud_memcache.RescheduleMaintenanceRequest.RescheduleType - ] = None, - schedule_time: Optional[timestamp_pb2.Timestamp] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Reschedules upcoming maintenance event. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1 - - def sample_reschedule_maintenance(): - # Create a client - client = memcache_v1.CloudMemcacheClient() - - # Initialize request argument(s) - request = memcache_v1.RescheduleMaintenanceRequest( - instance="instance_value", - reschedule_type="SPECIFIC_TIME", - ) - - # Make the request - operation = client.reschedule_maintenance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.memcache_v1.types.RescheduleMaintenanceRequest, dict]): - The request object. Request for - [RescheduleMaintenance][google.cloud.memcache.v1.CloudMemcache.RescheduleMaintenance]. - instance (str): - Required. Memcache instance resource name using the - form: - ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - where ``location_id`` refers to a GCP region. - - This corresponds to the ``instance`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - reschedule_type (google.cloud.memcache_v1.types.RescheduleMaintenanceRequest.RescheduleType): - Required. If reschedule type is SPECIFIC_TIME, must set - up schedule_time as well. - - This corresponds to the ``reschedule_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - schedule_time (google.protobuf.timestamp_pb2.Timestamp): - Timestamp when the maintenance shall be rescheduled to - if reschedule_type=SPECIFIC_TIME, in RFC 3339 format, - for example ``2012-11-15T16:19:00.094Z``. - - This corresponds to the ``schedule_time`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.memcache_v1.types.Instance` A - Memorystore for Memcached instance - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([instance, reschedule_type, schedule_time]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # Minor optimization to avoid making a copy if the user passes - # in a cloud_memcache.RescheduleMaintenanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloud_memcache.RescheduleMaintenanceRequest): - request = cloud_memcache.RescheduleMaintenanceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if instance is not None: - request.instance = instance - if reschedule_type is not None: - request.reschedule_type = reschedule_type - if schedule_time is not None: - request.schedule_time = schedule_time - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.reschedule_maintenance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("instance", request.instance),)), - ) - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - cloud_memcache.Instance, - metadata_type=cloud_memcache.OperationMetadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "CloudMemcacheClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. 
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. 
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_location, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_locations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=package_version.__version__ -) - - -__all__ = ("CloudMemcacheClient",) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/pagers.py b/google/cloud/memcache_v1/services/cloud_memcache/pagers.py deleted file mode 100644 index 306970d..0000000 --- a/google/cloud/memcache_v1/services/cloud_memcache/pagers.py +++ /dev/null @@ -1,155 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import ( - Any, - AsyncIterator, - Awaitable, - Callable, - Iterator, - Optional, - Sequence, - Tuple, -) - -from google.cloud.memcache_v1.types import cloud_memcache - - -class ListInstancesPager: - """A pager for iterating through ``list_instances`` requests. - - This class thinly wraps an initial - :class:`google.cloud.memcache_v1.types.ListInstancesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``instances`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListInstances`` requests and continue to iterate - through the ``instances`` field on the - corresponding responses. - - All the usual :class:`google.cloud.memcache_v1.types.ListInstancesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - - def __init__( - self, - method: Callable[..., cloud_memcache.ListInstancesResponse], - request: cloud_memcache.ListInstancesRequest, - response: cloud_memcache.ListInstancesResponse, - *, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.memcache_v1.types.ListInstancesRequest): - The initial request object. - response (google.cloud.memcache_v1.types.ListInstancesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = cloud_memcache.ListInstancesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[cloud_memcache.ListInstancesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[cloud_memcache.Instance]: - for page in self.pages: - yield from page.instances - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - -class ListInstancesAsyncPager: - """A pager for iterating through ``list_instances`` requests. - - This class thinly wraps an initial - :class:`google.cloud.memcache_v1.types.ListInstancesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``instances`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListInstances`` requests and continue to iterate - through the ``instances`` field on the - corresponding responses. 
- - All the usual :class:`google.cloud.memcache_v1.types.ListInstancesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., Awaitable[cloud_memcache.ListInstancesResponse]], - request: cloud_memcache.ListInstancesRequest, - response: cloud_memcache.ListInstancesResponse, - *, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.memcache_v1.types.ListInstancesRequest): - The initial request object. - response (google.cloud.memcache_v1.types.ListInstancesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = cloud_memcache.ListInstancesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[cloud_memcache.ListInstancesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterator[cloud_memcache.Instance]: - async def async_generator(): - async for page in self.pages: - for response in page.instances: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py deleted file mode 100644 index 8e6a821..0000000 --- 
a/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import CloudMemcacheTransport -from .grpc import CloudMemcacheGrpcTransport -from .grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport -from .rest import CloudMemcacheRestInterceptor, CloudMemcacheRestTransport - -# Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[CloudMemcacheTransport]] -_transport_registry["grpc"] = CloudMemcacheGrpcTransport -_transport_registry["grpc_asyncio"] = CloudMemcacheGrpcAsyncIOTransport -_transport_registry["rest"] = CloudMemcacheRestTransport - -__all__ = ( - "CloudMemcacheTransport", - "CloudMemcacheGrpcTransport", - "CloudMemcacheGrpcAsyncIOTransport", - "CloudMemcacheRestTransport", - "CloudMemcacheRestInterceptor", -) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py deleted file mode 100644 index 3f787c2..0000000 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py +++ /dev/null @@ -1,317 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1, operations_v1 -from google.api_core import retry as retries -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.memcache_v1 import gapic_version as package_version -from google.cloud.memcache_v1.types import cloud_memcache - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=package_version.__version__ -) - - -class CloudMemcacheTransport(abc.ABC): - """Abstract transport class for CloudMemcache.""" - - AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - - DEFAULT_HOST: str = "memcache.googleapis.com" - - def __init__( - self, - *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. 
- This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs( - "'credentials_file' and 'credentials' are mutually exclusive" - ) - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, **scopes_kwargs, quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default( - **scopes_kwargs, quota_project_id=quota_project_id - ) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience( - api_audience if api_audience else host - ) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if ( - always_use_jwt_access - and isinstance(credentials, service_account.Credentials) - and hasattr(service_account.Credentials, "with_always_use_jwt_access") - ): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
- if ":" not in host: - host += ":443" - self._host = host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.list_instances: gapic_v1.method.wrap_method( - self.list_instances, - default_timeout=1200.0, - client_info=client_info, - ), - self.get_instance: gapic_v1.method.wrap_method( - self.get_instance, - default_timeout=1200.0, - client_info=client_info, - ), - self.create_instance: gapic_v1.method.wrap_method( - self.create_instance, - default_timeout=1200.0, - client_info=client_info, - ), - self.update_instance: gapic_v1.method.wrap_method( - self.update_instance, - default_timeout=1200.0, - client_info=client_info, - ), - self.update_parameters: gapic_v1.method.wrap_method( - self.update_parameters, - default_timeout=1200.0, - client_info=client_info, - ), - self.delete_instance: gapic_v1.method.wrap_method( - self.delete_instance, - default_timeout=1200.0, - client_info=client_info, - ), - self.apply_parameters: gapic_v1.method.wrap_method( - self.apply_parameters, - default_timeout=1200.0, - client_info=client_info, - ), - self.reschedule_maintenance: gapic_v1.method.wrap_method( - self.reschedule_maintenance, - default_timeout=1200.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def list_instances( - self, - ) -> Callable[ - [cloud_memcache.ListInstancesRequest], - Union[ - cloud_memcache.ListInstancesResponse, - Awaitable[cloud_memcache.ListInstancesResponse], - ], - ]: - raise NotImplementedError() - - @property - def get_instance( - self, - ) -> Callable[ - [cloud_memcache.GetInstanceRequest], - Union[cloud_memcache.Instance, Awaitable[cloud_memcache.Instance]], - ]: - raise NotImplementedError() - - @property - def create_instance( - self, - ) -> Callable[ - [cloud_memcache.CreateInstanceRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def update_instance( - self, - ) -> Callable[ - [cloud_memcache.UpdateInstanceRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def update_parameters( - self, - ) -> Callable[ - [cloud_memcache.UpdateParametersRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def delete_instance( - self, - ) -> Callable[ - [cloud_memcache.DeleteInstanceRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def apply_parameters( - self, - ) -> Callable[ - [cloud_memcache.ApplyParametersRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def reschedule_maintenance( - self, - ) -> Callable[ - [cloud_memcache.RescheduleMaintenanceRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - 
[operations_pb2.ListOperationsRequest], - Union[ - operations_pb2.ListOperationsResponse, - Awaitable[operations_pb2.ListOperationsResponse], - ], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: - raise NotImplementedError() - - @property - def get_location( - self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: - raise NotImplementedError() - - @property - def list_locations( - self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[ - locations_pb2.ListLocationsResponse, - Awaitable[locations_pb2.ListLocationsResponse], - ], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ("CloudMemcacheTransport",) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py deleted file mode 100644 index c190673..0000000 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py +++ /dev/null @@ -1,598 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Callable, Dict, Optional, Sequence, Tuple, Union -import warnings - -from google.api_core import gapic_v1, grpc_helpers, operations_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -import grpc # type: ignore - -from google.cloud.memcache_v1.types import cloud_memcache - -from .base import DEFAULT_CLIENT_INFO, CloudMemcacheTransport - - -class CloudMemcacheGrpcTransport(CloudMemcacheTransport): - """gRPC backend transport for CloudMemcache. - - Configures and manages Cloud Memorystore for Memcached instances. 
- - The ``memcache.googleapis.com`` service implements the Google Cloud - Memorystore for Memcached API and defines the following resource - model for managing Memorystore Memcached (also called Memcached - below) instances: - - - The service works with a collection of cloud projects, named: - ``/projects/*`` - - Each project has a collection of available locations, named: - ``/locations/*`` - - Each location has a collection of Memcached instances, named: - ``/instances/*`` - - As such, Memcached instances are resources of the form: - ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - - Note that location_id must be a GCP ``region``; for example: - - - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached`` - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _stubs: Dict[str, Callable] - - def __init__( - self, - *, - host: str = "memcache.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. 
- credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel( - cls, - host: str = "memcache.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs, - ) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. 
If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs, - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service.""" - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient(self.grpc_channel) - - # Return the client from cache. - return self._operations_client - - @property - def list_instances( - self, - ) -> Callable[ - [cloud_memcache.ListInstancesRequest], cloud_memcache.ListInstancesResponse - ]: - r"""Return a callable for the list instances method over gRPC. - - Lists Instances in a given location. 
- - Returns: - Callable[[~.ListInstancesRequest], - ~.ListInstancesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_instances" not in self._stubs: - self._stubs["list_instances"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1.CloudMemcache/ListInstances", - request_serializer=cloud_memcache.ListInstancesRequest.serialize, - response_deserializer=cloud_memcache.ListInstancesResponse.deserialize, - ) - return self._stubs["list_instances"] - - @property - def get_instance( - self, - ) -> Callable[[cloud_memcache.GetInstanceRequest], cloud_memcache.Instance]: - r"""Return a callable for the get instance method over gRPC. - - Gets details of a single Instance. - - Returns: - Callable[[~.GetInstanceRequest], - ~.Instance]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_instance" not in self._stubs: - self._stubs["get_instance"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1.CloudMemcache/GetInstance", - request_serializer=cloud_memcache.GetInstanceRequest.serialize, - response_deserializer=cloud_memcache.Instance.deserialize, - ) - return self._stubs["get_instance"] - - @property - def create_instance( - self, - ) -> Callable[[cloud_memcache.CreateInstanceRequest], operations_pb2.Operation]: - r"""Return a callable for the create instance method over gRPC. - - Creates a new Instance in a given location. - - Returns: - Callable[[~.CreateInstanceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "create_instance" not in self._stubs: - self._stubs["create_instance"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1.CloudMemcache/CreateInstance", - request_serializer=cloud_memcache.CreateInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["create_instance"] - - @property - def update_instance( - self, - ) -> Callable[[cloud_memcache.UpdateInstanceRequest], operations_pb2.Operation]: - r"""Return a callable for the update instance method over gRPC. - - Updates an existing Instance in a given project and - location. - - Returns: - Callable[[~.UpdateInstanceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "update_instance" not in self._stubs: - self._stubs["update_instance"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1.CloudMemcache/UpdateInstance", - request_serializer=cloud_memcache.UpdateInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["update_instance"] - - @property - def update_parameters( - self, - ) -> Callable[[cloud_memcache.UpdateParametersRequest], operations_pb2.Operation]: - r"""Return a callable for the update parameters method over gRPC. - - Updates the defined Memcached parameters for an existing - instance. This method only stages the parameters, it must be - followed by ``ApplyParameters`` to apply the parameters to nodes - of the Memcached instance. 
- - Returns: - Callable[[~.UpdateParametersRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "update_parameters" not in self._stubs: - self._stubs["update_parameters"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1.CloudMemcache/UpdateParameters", - request_serializer=cloud_memcache.UpdateParametersRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["update_parameters"] - - @property - def delete_instance( - self, - ) -> Callable[[cloud_memcache.DeleteInstanceRequest], operations_pb2.Operation]: - r"""Return a callable for the delete instance method over gRPC. - - Deletes a single Instance. - - Returns: - Callable[[~.DeleteInstanceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_instance" not in self._stubs: - self._stubs["delete_instance"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1.CloudMemcache/DeleteInstance", - request_serializer=cloud_memcache.DeleteInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["delete_instance"] - - @property - def apply_parameters( - self, - ) -> Callable[[cloud_memcache.ApplyParametersRequest], operations_pb2.Operation]: - r"""Return a callable for the apply parameters method over gRPC. - - ``ApplyParameters`` restarts the set of specified nodes in order - to update them to the current set of parameters for the - Memcached Instance. 
- - Returns: - Callable[[~.ApplyParametersRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "apply_parameters" not in self._stubs: - self._stubs["apply_parameters"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1.CloudMemcache/ApplyParameters", - request_serializer=cloud_memcache.ApplyParametersRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["apply_parameters"] - - @property - def reschedule_maintenance( - self, - ) -> Callable[ - [cloud_memcache.RescheduleMaintenanceRequest], operations_pb2.Operation - ]: - r"""Return a callable for the reschedule maintenance method over gRPC. - - Reschedules upcoming maintenance event. - - Returns: - Callable[[~.RescheduleMaintenanceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "reschedule_maintenance" not in self._stubs: - self._stubs["reschedule_maintenance"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1.CloudMemcache/RescheduleMaintenance", - request_serializer=cloud_memcache.RescheduleMaintenanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["reschedule_maintenance"] - - def close(self): - self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse - ]: - r"""Return a callable for the list_operations method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse - ]: - r"""Return a callable for the list locations method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ("CloudMemcacheGrpcTransport",) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py deleted file mode 100644 index cfc1ad6..0000000 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py +++ /dev/null @@ -1,613 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -import warnings - -from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.memcache_v1.types import cloud_memcache - -from .base import DEFAULT_CLIENT_INFO, CloudMemcacheTransport -from .grpc import CloudMemcacheGrpcTransport - - -class CloudMemcacheGrpcAsyncIOTransport(CloudMemcacheTransport): - """gRPC AsyncIO backend transport for CloudMemcache. - - Configures and manages Cloud Memorystore for Memcached instances. - - The ``memcache.googleapis.com`` service implements the Google Cloud - Memorystore for Memcached API and defines the following resource - model for managing Memorystore Memcached (also called Memcached - below) instances: - - - The service works with a collection of cloud projects, named: - ``/projects/*`` - - Each project has a collection of available locations, named: - ``/locations/*`` - - Each location has a collection of Memcached instances, named: - ``/instances/*`` - - As such, Memcached instances are resources of the form: - ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - - Note that location_id must be a GCP ``region``; for example: - - - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached`` - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel( - cls, - host: str = "memcache.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs, - ) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. 
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs, - ) - - def __init__( - self, - *, - host: str = "memcache.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
- If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. 
- """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. 
- credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_instances( - self, - ) -> Callable[ - [cloud_memcache.ListInstancesRequest], - Awaitable[cloud_memcache.ListInstancesResponse], - ]: - r"""Return a callable for the list instances method over gRPC. - - Lists Instances in a given location. - - Returns: - Callable[[~.ListInstancesRequest], - Awaitable[~.ListInstancesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_instances" not in self._stubs: - self._stubs["list_instances"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1.CloudMemcache/ListInstances", - request_serializer=cloud_memcache.ListInstancesRequest.serialize, - response_deserializer=cloud_memcache.ListInstancesResponse.deserialize, - ) - return self._stubs["list_instances"] - - @property - def get_instance( - self, - ) -> Callable[ - [cloud_memcache.GetInstanceRequest], Awaitable[cloud_memcache.Instance] - ]: - r"""Return a callable for the get instance method over gRPC. - - Gets details of a single Instance. - - Returns: - Callable[[~.GetInstanceRequest], - Awaitable[~.Instance]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_instance" not in self._stubs: - self._stubs["get_instance"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1.CloudMemcache/GetInstance", - request_serializer=cloud_memcache.GetInstanceRequest.serialize, - response_deserializer=cloud_memcache.Instance.deserialize, - ) - return self._stubs["get_instance"] - - @property - def create_instance( - self, - ) -> Callable[ - [cloud_memcache.CreateInstanceRequest], Awaitable[operations_pb2.Operation] - ]: - r"""Return a callable for the create instance method over gRPC. - - Creates a new Instance in a given location. - - Returns: - Callable[[~.CreateInstanceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "create_instance" not in self._stubs: - self._stubs["create_instance"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1.CloudMemcache/CreateInstance", - request_serializer=cloud_memcache.CreateInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["create_instance"] - - @property - def update_instance( - self, - ) -> Callable[ - [cloud_memcache.UpdateInstanceRequest], Awaitable[operations_pb2.Operation] - ]: - r"""Return a callable for the update instance method over gRPC. - - Updates an existing Instance in a given project and - location. - - Returns: - Callable[[~.UpdateInstanceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "update_instance" not in self._stubs: - self._stubs["update_instance"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1.CloudMemcache/UpdateInstance", - request_serializer=cloud_memcache.UpdateInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["update_instance"] - - @property - def update_parameters( - self, - ) -> Callable[ - [cloud_memcache.UpdateParametersRequest], Awaitable[operations_pb2.Operation] - ]: - r"""Return a callable for the update parameters method over gRPC. - - Updates the defined Memcached parameters for an existing - instance. This method only stages the parameters, it must be - followed by ``ApplyParameters`` to apply the parameters to nodes - of the Memcached instance. - - Returns: - Callable[[~.UpdateParametersRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "update_parameters" not in self._stubs: - self._stubs["update_parameters"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1.CloudMemcache/UpdateParameters", - request_serializer=cloud_memcache.UpdateParametersRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["update_parameters"] - - @property - def delete_instance( - self, - ) -> Callable[ - [cloud_memcache.DeleteInstanceRequest], Awaitable[operations_pb2.Operation] - ]: - r"""Return a callable for the delete instance method over gRPC. - - Deletes a single Instance. - - Returns: - Callable[[~.DeleteInstanceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_instance" not in self._stubs: - self._stubs["delete_instance"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1.CloudMemcache/DeleteInstance", - request_serializer=cloud_memcache.DeleteInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["delete_instance"] - - @property - def apply_parameters( - self, - ) -> Callable[ - [cloud_memcache.ApplyParametersRequest], Awaitable[operations_pb2.Operation] - ]: - r"""Return a callable for the apply parameters method over gRPC. - - ``ApplyParameters`` restarts the set of specified nodes in order - to update them to the current set of parameters for the - Memcached Instance. 
- - Returns: - Callable[[~.ApplyParametersRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "apply_parameters" not in self._stubs: - self._stubs["apply_parameters"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1.CloudMemcache/ApplyParameters", - request_serializer=cloud_memcache.ApplyParametersRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["apply_parameters"] - - @property - def reschedule_maintenance( - self, - ) -> Callable[ - [cloud_memcache.RescheduleMaintenanceRequest], - Awaitable[operations_pb2.Operation], - ]: - r"""Return a callable for the reschedule maintenance method over gRPC. - - Reschedules upcoming maintenance event. - - Returns: - Callable[[~.RescheduleMaintenanceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "reschedule_maintenance" not in self._stubs: - self._stubs["reschedule_maintenance"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1.CloudMemcache/RescheduleMaintenance", - request_serializer=cloud_memcache.RescheduleMaintenanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["reschedule_maintenance"] - - def close(self): - return self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse - ]: - r"""Return a callable for the list_operations method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse - ]: - r"""Return a callable for the list locations method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - -__all__ = ("CloudMemcacheGrpcAsyncIOTransport",) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/rest.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/rest.py deleted file mode 100644 index 1a79678..0000000 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/rest.py +++ /dev/null @@ -1,1865 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -import dataclasses -import json # type: ignore -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -from google.api_core import ( - gapic_v1, - operations_v1, - path_template, - rest_helpers, - rest_streaming, -) -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.protobuf import json_format -import grpc # type: ignore -from requests import __version__ as requests_version - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - - -from google.longrunning import operations_pb2 # type: ignore - -from google.cloud.memcache_v1.types import cloud_memcache - -from .base import CloudMemcacheTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class CloudMemcacheRestInterceptor: - """Interceptor for CloudMemcache. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the CloudMemcacheRestTransport. - - .. 
code-block:: python - class MyCustomCloudMemcacheInterceptor(CloudMemcacheRestInterceptor): - def pre_apply_parameters(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_apply_parameters(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_instance(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_instance(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_instance(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_instance(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_instance(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_instance(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_instances(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_instances(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_reschedule_maintenance(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_reschedule_maintenance(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_instance(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_instance(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_parameters(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_parameters(self, response): - logging.log(f"Received response: {response}") - 
return response - - transport = CloudMemcacheRestTransport(interceptor=MyCustomCloudMemcacheInterceptor()) - client = CloudMemcacheClient(transport=transport) - - - """ - - def pre_apply_parameters( - self, - request: cloud_memcache.ApplyParametersRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[cloud_memcache.ApplyParametersRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for apply_parameters - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudMemcache server. - """ - return request, metadata - - def post_apply_parameters( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for apply_parameters - - Override in a subclass to manipulate the response - after it is returned by the CloudMemcache server but before - it is returned to user code. - """ - return response - - def pre_create_instance( - self, - request: cloud_memcache.CreateInstanceRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[cloud_memcache.CreateInstanceRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_instance - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudMemcache server. - """ - return request, metadata - - def post_create_instance( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for create_instance - - Override in a subclass to manipulate the response - after it is returned by the CloudMemcache server but before - it is returned to user code. - """ - return response - - def pre_delete_instance( - self, - request: cloud_memcache.DeleteInstanceRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[cloud_memcache.DeleteInstanceRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_instance - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudMemcache server. 
- """ - return request, metadata - - def post_delete_instance( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_instance - - Override in a subclass to manipulate the response - after it is returned by the CloudMemcache server but before - it is returned to user code. - """ - return response - - def pre_get_instance( - self, - request: cloud_memcache.GetInstanceRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[cloud_memcache.GetInstanceRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_instance - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudMemcache server. - """ - return request, metadata - - def post_get_instance( - self, response: cloud_memcache.Instance - ) -> cloud_memcache.Instance: - """Post-rpc interceptor for get_instance - - Override in a subclass to manipulate the response - after it is returned by the CloudMemcache server but before - it is returned to user code. - """ - return response - - def pre_list_instances( - self, - request: cloud_memcache.ListInstancesRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[cloud_memcache.ListInstancesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_instances - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudMemcache server. - """ - return request, metadata - - def post_list_instances( - self, response: cloud_memcache.ListInstancesResponse - ) -> cloud_memcache.ListInstancesResponse: - """Post-rpc interceptor for list_instances - - Override in a subclass to manipulate the response - after it is returned by the CloudMemcache server but before - it is returned to user code. 
- """ - return response - - def pre_reschedule_maintenance( - self, - request: cloud_memcache.RescheduleMaintenanceRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[cloud_memcache.RescheduleMaintenanceRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for reschedule_maintenance - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudMemcache server. - """ - return request, metadata - - def post_reschedule_maintenance( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for reschedule_maintenance - - Override in a subclass to manipulate the response - after it is returned by the CloudMemcache server but before - it is returned to user code. - """ - return response - - def pre_update_instance( - self, - request: cloud_memcache.UpdateInstanceRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[cloud_memcache.UpdateInstanceRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_instance - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudMemcache server. - """ - return request, metadata - - def post_update_instance( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for update_instance - - Override in a subclass to manipulate the response - after it is returned by the CloudMemcache server but before - it is returned to user code. - """ - return response - - def pre_update_parameters( - self, - request: cloud_memcache.UpdateParametersRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[cloud_memcache.UpdateParametersRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_parameters - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudMemcache server. 
- """ - return request, metadata - - def post_update_parameters( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for update_parameters - - Override in a subclass to manipulate the response - after it is returned by the CloudMemcache server but before - it is returned to user code. - """ - return response - - def pre_get_location( - self, - request: locations_pb2.GetLocationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_location - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudMemcache server. - """ - return request, metadata - - def post_get_location( - self, response: locations_pb2.Location - ) -> locations_pb2.Location: - """Post-rpc interceptor for get_location - - Override in a subclass to manipulate the response - after it is returned by the CloudMemcache server but before - it is returned to user code. - """ - return response - - def pre_list_locations( - self, - request: locations_pb2.ListLocationsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_locations - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudMemcache server. - """ - return request, metadata - - def post_list_locations( - self, response: locations_pb2.ListLocationsResponse - ) -> locations_pb2.ListLocationsResponse: - """Post-rpc interceptor for list_locations - - Override in a subclass to manipulate the response - after it is returned by the CloudMemcache server but before - it is returned to user code. 
- """ - return response - - def pre_cancel_operation( - self, - request: operations_pb2.CancelOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudMemcache server. - """ - return request, metadata - - def post_cancel_operation(self, response: None) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the CloudMemcache server but before - it is returned to user code. - """ - return response - - def pre_delete_operation( - self, - request: operations_pb2.DeleteOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudMemcache server. - """ - return request, metadata - - def post_delete_operation(self, response: None) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the CloudMemcache server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, - request: operations_pb2.GetOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudMemcache server. 
- """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the CloudMemcache server but before - it is returned to user code. - """ - return response - - def pre_list_operations( - self, - request: operations_pb2.ListOperationsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudMemcache server. - """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the CloudMemcache server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class CloudMemcacheRestStub: - _session: AuthorizedSession - _host: str - _interceptor: CloudMemcacheRestInterceptor - - -class CloudMemcacheRestTransport(CloudMemcacheTransport): - """REST backend transport for CloudMemcache. - - Configures and manages Cloud Memorystore for Memcached instances. 
- - The ``memcache.googleapis.com`` service implements the Google Cloud - Memorystore for Memcached API and defines the following resource - model for managing Memorystore Memcached (also called Memcached - below) instances: - - - The service works with a collection of cloud projects, named: - ``/projects/*`` - - Each project has a collection of available locations, named: - ``/locations/*`` - - Each location has a collection of Memcached instances, named: - ``/instances/*`` - - As such, Memcached instances are resources of the form: - ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - - Note that location_id must be a GCP ``region``; for example: - - - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached`` - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__( - self, - *, - host: str = "memcache.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = "https", - interceptor: Optional[CloudMemcacheRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. 
- - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST - ) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or CloudMemcacheRestInterceptor() - self._prep_wrapped_messages(client_info) - - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. 
- if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - "google.longrunning.Operations.CancelOperation": [ - { - "method": "post", - "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", - "body": "*", - }, - ], - "google.longrunning.Operations.DeleteOperation": [ - { - "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/operations/*}", - }, - ], - "google.longrunning.Operations.GetOperation": [ - { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*/operations/*}", - }, - ], - "google.longrunning.Operations.ListOperations": [ - { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*}/operations", - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1", - ) - - self._operations_client = operations_v1.AbstractOperationsClient( - transport=rest_transport - ) - - # Return the client from cache. - return self._operations_client - - class _ApplyParameters(CloudMemcacheRestStub): - def __hash__(self): - return hash("ApplyParameters") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: cloud_memcache.ApplyParametersRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the apply parameters method over HTTP. - - Args: - request (~.cloud_memcache.ApplyParametersRequest): - The request object. Request for - [ApplyParameters][google.cloud.memcache.v1.CloudMemcache.ApplyParameters]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{name=projects/*/locations/*/instances/*}:applyParameters", - "body": "*", - }, - ] - request, metadata = self._interceptor.pre_apply_parameters( - request, metadata - ) - pb_request = cloud_memcache.ApplyParametersRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_apply_parameters(resp) - return resp - - class _CreateInstance(CloudMemcacheRestStub): - def __hash__(self): - return hash("CreateInstance") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "instanceId": "", - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: cloud_memcache.CreateInstanceRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the create instance method over HTTP. - - Args: - request (~.cloud_memcache.CreateInstanceRequest): - The request object. Request for - [CreateInstance][google.cloud.memcache.v1.CloudMemcache.CreateInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{parent=projects/*/locations/*}/instances", - "body": "instance", - }, - ] - request, metadata = self._interceptor.pre_create_instance(request, metadata) - pb_request = cloud_memcache.CreateInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_instance(resp) - return resp - - class _DeleteInstance(CloudMemcacheRestStub): - def __hash__(self): - return hash("DeleteInstance") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: cloud_memcache.DeleteInstanceRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the delete instance method over HTTP. - - Args: - request (~.cloud_memcache.DeleteInstanceRequest): - The request object. Request for - [DeleteInstance][google.cloud.memcache.v1.CloudMemcache.DeleteInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/instances/*}", - }, - ] - request, metadata = self._interceptor.pre_delete_instance(request, metadata) - pb_request = cloud_memcache.DeleteInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_instance(resp) - return resp - - class _GetInstance(CloudMemcacheRestStub): - def __hash__(self): - return hash("GetInstance") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: cloud_memcache.GetInstanceRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloud_memcache.Instance: - r"""Call the get instance method over HTTP. - - Args: - request (~.cloud_memcache.GetInstanceRequest): - The request object. Request for - [GetInstance][google.cloud.memcache.v1.CloudMemcache.GetInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - ~.cloud_memcache.Instance: - A Memorystore for Memcached instance - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*/instances/*}", - }, - ] - request, metadata = self._interceptor.pre_get_instance(request, metadata) - pb_request = cloud_memcache.GetInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = cloud_memcache.Instance() - pb_resp = cloud_memcache.Instance.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_instance(resp) - return resp - - class _ListInstances(CloudMemcacheRestStub): - def __hash__(self): - return hash("ListInstances") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: cloud_memcache.ListInstancesRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloud_memcache.ListInstancesResponse: - r"""Call the list instances method over HTTP. - - Args: - request (~.cloud_memcache.ListInstancesRequest): - The request object. Request for - [ListInstances][google.cloud.memcache.v1.CloudMemcache.ListInstances]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.cloud_memcache.ListInstancesResponse: - Response for - [ListInstances][google.cloud.memcache.v1.CloudMemcache.ListInstances]. 
- - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{parent=projects/*/locations/*}/instances", - }, - ] - request, metadata = self._interceptor.pre_list_instances(request, metadata) - pb_request = cloud_memcache.ListInstancesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = cloud_memcache.ListInstancesResponse() - pb_resp = cloud_memcache.ListInstancesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_instances(resp) - return resp - - class _RescheduleMaintenance(CloudMemcacheRestStub): - def __hash__(self): - return hash("RescheduleMaintenance") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: cloud_memcache.RescheduleMaintenanceRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the reschedule maintenance method over HTTP. - - Args: - request (~.cloud_memcache.RescheduleMaintenanceRequest): - The request object. Request for - [RescheduleMaintenance][google.cloud.memcache.v1.CloudMemcache.RescheduleMaintenance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{instance=projects/*/locations/*/instances/*}:rescheduleMaintenance", - "body": "*", - }, - ] - request, metadata = self._interceptor.pre_reschedule_maintenance( - request, metadata - ) - pb_request = cloud_memcache.RescheduleMaintenanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_reschedule_maintenance(resp) - return resp - - class _UpdateInstance(CloudMemcacheRestStub): - def __hash__(self): - return hash("UpdateInstance") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask": {}, - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: cloud_memcache.UpdateInstanceRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the update instance method over HTTP. - - Args: - request (~.cloud_memcache.UpdateInstanceRequest): - The request object. Request for - [UpdateInstance][google.cloud.memcache.v1.CloudMemcache.UpdateInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "patch", - "uri": "/v1/{instance.name=projects/*/locations/*/instances/*}", - "body": "instance", - }, - ] - request, metadata = self._interceptor.pre_update_instance(request, metadata) - pb_request = cloud_memcache.UpdateInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_instance(resp) - return resp - - class _UpdateParameters(CloudMemcacheRestStub): - def __hash__(self): - return hash("UpdateParameters") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: cloud_memcache.UpdateParametersRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the update parameters method over HTTP. - - Args: - request (~.cloud_memcache.UpdateParametersRequest): - The request object. Request for - [UpdateParameters][google.cloud.memcache.v1.CloudMemcache.UpdateParameters]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "patch", - "uri": "/v1/{name=projects/*/locations/*/instances/*}:updateParameters", - "body": "*", - }, - ] - request, metadata = self._interceptor.pre_update_parameters( - request, metadata - ) - pb_request = cloud_memcache.UpdateParametersRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_parameters(resp) - return resp - - @property - def apply_parameters( - self, - ) -> Callable[[cloud_memcache.ApplyParametersRequest], operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ApplyParameters(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_instance( - self, - ) -> Callable[[cloud_memcache.CreateInstanceRequest], operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_instance( - self, - ) -> Callable[[cloud_memcache.DeleteInstanceRequest], operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_instance( - self, - ) -> Callable[[cloud_memcache.GetInstanceRequest], cloud_memcache.Instance]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_instances( - self, - ) -> Callable[ - [cloud_memcache.ListInstancesRequest], cloud_memcache.ListInstancesResponse - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore - - @property - def reschedule_maintenance( - self, - ) -> Callable[ - [cloud_memcache.RescheduleMaintenanceRequest], operations_pb2.Operation - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._RescheduleMaintenance(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_instance( - self, - ) -> Callable[[cloud_memcache.UpdateInstanceRequest], operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_parameters( - self, - ) -> Callable[[cloud_memcache.UpdateParametersRequest], operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateParameters(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - - class _GetLocation(CloudMemcacheRestStub): - def __call__( - self, - request: locations_pb2.GetLocationRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.Location: - - r"""Call the get location method over HTTP. - - Args: - request (locations_pb2.GetLocationRequest): - The request object for GetLocation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - locations_pb2.Location: Response from GetLocation method. 
- """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*}", - }, - ] - - request, metadata = self._interceptor.pre_get_location(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = locations_pb2.Location() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_location(resp) - return resp - - @property - def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - - class _ListLocations(CloudMemcacheRestStub): - def __call__( - self, - request: locations_pb2.ListLocationsRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.ListLocationsResponse: - - r"""Call the list locations method over HTTP. - - Args: - request (locations_pb2.ListLocationsRequest): - The request object for ListLocations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - locations_pb2.ListLocationsResponse: Response from ListLocations method. - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*}/locations", - }, - ] - - request, metadata = self._interceptor.pre_list_locations(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = locations_pb2.ListLocationsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_list_locations(resp) - return resp - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(CloudMemcacheRestStub): - def __call__( - self, - request: operations_pb2.CancelOperationRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", - "body": "*", - }, - ] - - request, metadata = self._interceptor.pre_cancel_operation( - request, metadata - ) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - body = json.loads(json.dumps(transcoded_request["body"])) - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(CloudMemcacheRestStub): - def __call__( - self, - request: operations_pb2.DeleteOperationRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - - r"""Call the delete operation method over HTTP. 
- - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/operations/*}", - }, - ] - - request, metadata = self._interceptor.pre_delete_operation( - request, metadata - ) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(CloudMemcacheRestStub): - def __call__( - self, - request: operations_pb2.GetOperationRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. 
- - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.Operation: Response from GetOperation method. - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*/operations/*}", - }, - ] - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_operation(resp) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(CloudMemcacheRestStub): - def __call__( - self, - request: operations_pb2.ListOperationsRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. - - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. 
- """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*}/operations", - }, - ] - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_list_operations(resp) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__ = ("CloudMemcacheRestTransport",) diff --git a/google/cloud/memcache_v1/types/__init__.py b/google/cloud/memcache_v1/types/__init__.py deleted file mode 100644 index 3887af7..0000000 --- a/google/cloud/memcache_v1/types/__init__.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .cloud_memcache import ( - ApplyParametersRequest, - CreateInstanceRequest, - DeleteInstanceRequest, - GetInstanceRequest, - Instance, - ListInstancesRequest, - ListInstancesResponse, - LocationMetadata, - MaintenancePolicy, - MaintenanceSchedule, - MemcacheParameters, - MemcacheVersion, - OperationMetadata, - RescheduleMaintenanceRequest, - UpdateInstanceRequest, - UpdateParametersRequest, - WeeklyMaintenanceWindow, - ZoneMetadata, -) - -__all__ = ( - "ApplyParametersRequest", - "CreateInstanceRequest", - "DeleteInstanceRequest", - "GetInstanceRequest", - "Instance", - "ListInstancesRequest", - "ListInstancesResponse", - "LocationMetadata", - "MaintenancePolicy", - "MaintenanceSchedule", - "MemcacheParameters", - "OperationMetadata", - "RescheduleMaintenanceRequest", - "UpdateInstanceRequest", - "UpdateParametersRequest", - "WeeklyMaintenanceWindow", - "ZoneMetadata", - "MemcacheVersion", -) diff --git a/google/cloud/memcache_v1/types/cloud_memcache.py b/google/cloud/memcache_v1/types/cloud_memcache.py deleted file mode 100644 index a01dc01..0000000 --- a/google/cloud/memcache_v1/types/cloud_memcache.py +++ /dev/null @@ -1,922 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.type import dayofweek_pb2 # type: ignore -from google.type import timeofday_pb2 # type: ignore -import proto # type: ignore - -__protobuf__ = proto.module( - package="google.cloud.memcache.v1", - manifest={ - "MemcacheVersion", - "Instance", - "MaintenancePolicy", - "WeeklyMaintenanceWindow", - "MaintenanceSchedule", - "RescheduleMaintenanceRequest", - "ListInstancesRequest", - "ListInstancesResponse", - "GetInstanceRequest", - "CreateInstanceRequest", - "UpdateInstanceRequest", - "DeleteInstanceRequest", - "ApplyParametersRequest", - "UpdateParametersRequest", - "MemcacheParameters", - "OperationMetadata", - "LocationMetadata", - "ZoneMetadata", - }, -) - - -class MemcacheVersion(proto.Enum): - r"""Memcached versions supported by our service. - - Values: - MEMCACHE_VERSION_UNSPECIFIED (0): - No description available. - MEMCACHE_1_5 (1): - Memcached 1.5 version. - """ - MEMCACHE_VERSION_UNSPECIFIED = 0 - MEMCACHE_1_5 = 1 - - -class Instance(proto.Message): - r"""A Memorystore for Memcached instance - - Attributes: - name (str): - Required. 
Unique name of the resource in this scope - including project and location using the form: - ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - - Note: Memcached instances are managed and addressed at the - regional level so ``location_id`` here refers to a Google - Cloud region; however, users may choose which zones - Memcached nodes should be provisioned in within an instance. - Refer to [zones][google.cloud.memcache.v1.Instance.zones] - field for more details. - display_name (str): - User provided name for the instance, which is - only used for display purposes. Cannot be more - than 80 characters. - labels (MutableMapping[str, str]): - Resource labels to represent user-provided - metadata. Refer to cloud documentation on labels - for more details. - https://cloud.google.com/compute/docs/labeling-resources - authorized_network (str): - The full name of the Google Compute Engine - `network `__ - to which the instance is connected. If left unspecified, the - ``default`` network will be used. - zones (MutableSequence[str]): - Zones in which Memcached nodes should be - provisioned. Memcached nodes will be equally - distributed across these zones. If not provided, - the service will by default create nodes in all - zones in the region for the instance. - node_count (int): - Required. Number of nodes in the Memcached - instance. - node_config (google.cloud.memcache_v1.types.Instance.NodeConfig): - Required. Configuration for Memcached nodes. - memcache_version (google.cloud.memcache_v1.types.MemcacheVersion): - The major version of Memcached software. If not provided, - latest supported version will be used. Currently the latest - supported major version is ``MEMCACHE_1_5``. The minor - version will be automatically determined by our system based - on the latest supported minor version. - parameters (google.cloud.memcache_v1.types.MemcacheParameters): - User defined parameters to apply to the - memcached process on each node. 
- memcache_nodes (MutableSequence[google.cloud.memcache_v1.types.Instance.Node]): - Output only. List of Memcached nodes. Refer to - [Node][google.cloud.memcache.v1.Instance.Node] message for - more details. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the instance was - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the instance was - updated. - state (google.cloud.memcache_v1.types.Instance.State): - Output only. The state of this Memcached - instance. - memcache_full_version (str): - Output only. The full version of memcached - server running on this instance. System - automatically determines the full memcached - version for an instance based on the input - MemcacheVersion. - The full version format will be - "memcached-1.5.16". - instance_messages (MutableSequence[google.cloud.memcache_v1.types.Instance.InstanceMessage]): - List of messages that describe the current - state of the Memcached instance. - discovery_endpoint (str): - Output only. Endpoint for the Discovery API. - maintenance_policy (google.cloud.memcache_v1.types.MaintenancePolicy): - The maintenance policy for the instance. If - not provided, the maintenance event will be - performed based on Memorystore internal rollout - schedule. - maintenance_schedule (google.cloud.memcache_v1.types.MaintenanceSchedule): - Output only. Published maintenance schedule. - """ - - class State(proto.Enum): - r"""Different states of a Memcached instance. - - Values: - STATE_UNSPECIFIED (0): - State not set. - CREATING (1): - Memcached instance is being created. - READY (2): - Memcached instance has been created and ready - to be used. - UPDATING (3): - Memcached instance is updating configuration - such as maintenance policy and schedule. - DELETING (4): - Memcached instance is being deleted. - PERFORMING_MAINTENANCE (5): - Memcached instance is going through - maintenance, e.g. data plane rollout. 
- """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - READY = 2 - UPDATING = 3 - DELETING = 4 - PERFORMING_MAINTENANCE = 5 - - class NodeConfig(proto.Message): - r"""Configuration for a Memcached Node. - - Attributes: - cpu_count (int): - Required. Number of cpus per Memcached node. - memory_size_mb (int): - Required. Memory size in MiB for each - Memcached node. - """ - - cpu_count: int = proto.Field( - proto.INT32, - number=1, - ) - memory_size_mb: int = proto.Field( - proto.INT32, - number=2, - ) - - class Node(proto.Message): - r""" - - Attributes: - node_id (str): - Output only. Identifier of the Memcached - node. The node id does not include project or - location like the Memcached instance name. - zone (str): - Output only. Location (GCP Zone) for the - Memcached node. - state (google.cloud.memcache_v1.types.Instance.Node.State): - Output only. Current state of the Memcached - node. - host (str): - Output only. Hostname or IP address of the - Memcached node used by the clients to connect to - the Memcached server on this node. - port (int): - Output only. The port number of the Memcached - server on this node. - parameters (google.cloud.memcache_v1.types.MemcacheParameters): - User defined parameters currently applied to - the node. - """ - - class State(proto.Enum): - r"""Different states of a Memcached node. - - Values: - STATE_UNSPECIFIED (0): - Node state is not set. - CREATING (1): - Node is being created. - READY (2): - Node has been created and ready to be used. - DELETING (3): - Node is being deleted. - UPDATING (4): - Node is being updated. 
- """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - READY = 2 - DELETING = 3 - UPDATING = 4 - - node_id: str = proto.Field( - proto.STRING, - number=1, - ) - zone: str = proto.Field( - proto.STRING, - number=2, - ) - state: "Instance.Node.State" = proto.Field( - proto.ENUM, - number=3, - enum="Instance.Node.State", - ) - host: str = proto.Field( - proto.STRING, - number=4, - ) - port: int = proto.Field( - proto.INT32, - number=5, - ) - parameters: "MemcacheParameters" = proto.Field( - proto.MESSAGE, - number=6, - message="MemcacheParameters", - ) - - class InstanceMessage(proto.Message): - r""" - - Attributes: - code (google.cloud.memcache_v1.types.Instance.InstanceMessage.Code): - A code that correspond to one type of - user-facing message. - message (str): - Message on memcached instance which will be - exposed to users. - """ - - class Code(proto.Enum): - r""" - - Values: - CODE_UNSPECIFIED (0): - Message Code not set. - ZONE_DISTRIBUTION_UNBALANCED (1): - Memcached nodes are distributed unevenly. 
- """ - CODE_UNSPECIFIED = 0 - ZONE_DISTRIBUTION_UNBALANCED = 1 - - code: "Instance.InstanceMessage.Code" = proto.Field( - proto.ENUM, - number=1, - enum="Instance.InstanceMessage.Code", - ) - message: str = proto.Field( - proto.STRING, - number=2, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - authorized_network: str = proto.Field( - proto.STRING, - number=4, - ) - zones: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - node_count: int = proto.Field( - proto.INT32, - number=6, - ) - node_config: NodeConfig = proto.Field( - proto.MESSAGE, - number=7, - message=NodeConfig, - ) - memcache_version: "MemcacheVersion" = proto.Field( - proto.ENUM, - number=9, - enum="MemcacheVersion", - ) - parameters: "MemcacheParameters" = proto.Field( - proto.MESSAGE, - number=11, - message="MemcacheParameters", - ) - memcache_nodes: MutableSequence[Node] = proto.RepeatedField( - proto.MESSAGE, - number=12, - message=Node, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=13, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=14, - message=timestamp_pb2.Timestamp, - ) - state: State = proto.Field( - proto.ENUM, - number=15, - enum=State, - ) - memcache_full_version: str = proto.Field( - proto.STRING, - number=18, - ) - instance_messages: MutableSequence[InstanceMessage] = proto.RepeatedField( - proto.MESSAGE, - number=19, - message=InstanceMessage, - ) - discovery_endpoint: str = proto.Field( - proto.STRING, - number=20, - ) - maintenance_policy: "MaintenancePolicy" = proto.Field( - proto.MESSAGE, - number=21, - message="MaintenancePolicy", - ) - maintenance_schedule: "MaintenanceSchedule" = proto.Field( - proto.MESSAGE, - number=22, - message="MaintenanceSchedule", - ) - 
- -class MaintenancePolicy(proto.Message): - r"""Maintenance policy per instance. - - Attributes: - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the policy was - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the policy was - updated. - description (str): - Description of what this policy is for. Create/Update - methods return INVALID_ARGUMENT if the length is greater - than 512. - weekly_maintenance_window (MutableSequence[google.cloud.memcache_v1.types.WeeklyMaintenanceWindow]): - Required. Maintenance window that is applied to resources - covered by this policy. Minimum 1. For the current version, - the maximum number of weekly_maintenance_windows is expected - to be one. - """ - - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - weekly_maintenance_window: MutableSequence[ - "WeeklyMaintenanceWindow" - ] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message="WeeklyMaintenanceWindow", - ) - - -class WeeklyMaintenanceWindow(proto.Message): - r"""Time window specified for weekly operations. - - Attributes: - day (google.type.dayofweek_pb2.DayOfWeek): - Required. Allows to define schedule that runs - specified day of the week. - start_time (google.type.timeofday_pb2.TimeOfDay): - Required. Start time of the window in UTC. - duration (google.protobuf.duration_pb2.Duration): - Required. Duration of the time window. 
- """ - - day: dayofweek_pb2.DayOfWeek = proto.Field( - proto.ENUM, - number=1, - enum=dayofweek_pb2.DayOfWeek, - ) - start_time: timeofday_pb2.TimeOfDay = proto.Field( - proto.MESSAGE, - number=2, - message=timeofday_pb2.TimeOfDay, - ) - duration: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=3, - message=duration_pb2.Duration, - ) - - -class MaintenanceSchedule(proto.Message): - r"""Upcoming maintenance schedule. - - Attributes: - start_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The start time of any upcoming - scheduled maintenance for this instance. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The end time of any upcoming - scheduled maintenance for this instance. - schedule_deadline_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The deadline that the - maintenance schedule start time can not go - beyond, including reschedule. - """ - - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - schedule_deadline_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - -class RescheduleMaintenanceRequest(proto.Message): - r"""Request for - [RescheduleMaintenance][google.cloud.memcache.v1.CloudMemcache.RescheduleMaintenance]. - - Attributes: - instance (str): - Required. Memcache instance resource name using the form: - ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - where ``location_id`` refers to a GCP region. - reschedule_type (google.cloud.memcache_v1.types.RescheduleMaintenanceRequest.RescheduleType): - Required. If reschedule type is SPECIFIC_TIME, must set up - schedule_time as well. 
- schedule_time (google.protobuf.timestamp_pb2.Timestamp): - Timestamp when the maintenance shall be rescheduled to if - reschedule_type=SPECIFIC_TIME, in RFC 3339 format, for - example ``2012-11-15T16:19:00.094Z``. - """ - - class RescheduleType(proto.Enum): - r"""Reschedule options. - - Values: - RESCHEDULE_TYPE_UNSPECIFIED (0): - Not set. - IMMEDIATE (1): - If the user wants to schedule the maintenance - to happen now. - NEXT_AVAILABLE_WINDOW (2): - If the user wants to use the existing - maintenance policy to find the next available - window. - SPECIFIC_TIME (3): - If the user wants to reschedule the - maintenance to a specific time. - """ - RESCHEDULE_TYPE_UNSPECIFIED = 0 - IMMEDIATE = 1 - NEXT_AVAILABLE_WINDOW = 2 - SPECIFIC_TIME = 3 - - instance: str = proto.Field( - proto.STRING, - number=1, - ) - reschedule_type: RescheduleType = proto.Field( - proto.ENUM, - number=2, - enum=RescheduleType, - ) - schedule_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -class ListInstancesRequest(proto.Message): - r"""Request for - [ListInstances][google.cloud.memcache.v1.CloudMemcache.ListInstances]. - - Attributes: - parent (str): - Required. The resource name of the instance location using - the form: ``projects/{project_id}/locations/{location_id}`` - where ``location_id`` refers to a GCP region - page_size (int): - The maximum number of items to return. - - If not specified, a default value of 1000 will be used by - the service. Regardless of the ``page_size`` value, the - response may include a partial list and a caller should only - rely on response's - [``next_page_token``][google.cloud.memcache.v1.ListInstancesResponse.next_page_token] - to determine if there are more instances left to be queried. - page_token (str): - The ``next_page_token`` value returned from a previous List - request, if any. - filter (str): - List filter. 
For example, exclude all Memcached instances - with name as my-instance by specifying - ``"name != my-instance"``. - order_by (str): - Sort results. Supported values are "name", - "name desc" or "" (unsorted). - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListInstancesResponse(proto.Message): - r"""Response for - [ListInstances][google.cloud.memcache.v1.CloudMemcache.ListInstances]. - - Attributes: - instances (MutableSequence[google.cloud.memcache_v1.types.Instance]): - A list of Memcached instances in the project in the - specified location, or across all locations. - - If the ``location_id`` in the parent field of the request is - "-", all regions available to the project are queried, and - the results aggregated. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - unreachable (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - instances: MutableSequence["Instance"] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="Instance", - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class GetInstanceRequest(proto.Message): - r"""Request for - [GetInstance][google.cloud.memcache.v1.CloudMemcache.GetInstance]. - - Attributes: - name (str): - Required. 
Memcached instance resource name in the format: - ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - where ``location_id`` refers to a GCP region - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateInstanceRequest(proto.Message): - r"""Request for - [CreateInstance][google.cloud.memcache.v1.CloudMemcache.CreateInstance]. - - Attributes: - parent (str): - Required. The resource name of the instance location using - the form: ``projects/{project_id}/locations/{location_id}`` - where ``location_id`` refers to a GCP region - instance_id (str): - Required. The logical name of the Memcached instance in the - user project with the following restrictions: - - - Must contain only lowercase letters, numbers, and - hyphens. - - Must start with a letter. - - Must be between 1-40 characters. - - Must end with a number or a letter. - - Must be unique within the user project / location. - - If any of the above are not met, the API raises an invalid - argument error. - instance (google.cloud.memcache_v1.types.Instance): - Required. A Memcached Instance - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - instance_id: str = proto.Field( - proto.STRING, - number=2, - ) - instance: "Instance" = proto.Field( - proto.MESSAGE, - number=3, - message="Instance", - ) - - -class UpdateInstanceRequest(proto.Message): - r"""Request for - [UpdateInstance][google.cloud.memcache.v1.CloudMemcache.UpdateInstance]. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - - - ``displayName`` - instance (google.cloud.memcache_v1.types.Instance): - Required. A Memcached Instance. Only fields specified in - update_mask are updated. 
- """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - instance: "Instance" = proto.Field( - proto.MESSAGE, - number=2, - message="Instance", - ) - - -class DeleteInstanceRequest(proto.Message): - r"""Request for - [DeleteInstance][google.cloud.memcache.v1.CloudMemcache.DeleteInstance]. - - Attributes: - name (str): - Required. Memcached instance resource name in the format: - ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - where ``location_id`` refers to a GCP region - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ApplyParametersRequest(proto.Message): - r"""Request for - [ApplyParameters][google.cloud.memcache.v1.CloudMemcache.ApplyParameters]. - - Attributes: - name (str): - Required. Resource name of the Memcached - instance for which parameter group updates - should be applied. - node_ids (MutableSequence[str]): - Nodes to which the instance-level parameter - group is applied. - apply_all (bool): - Whether to apply instance-level parameter group to all - nodes. If set to true, users are restricted from specifying - individual nodes, and ``ApplyParameters`` updates all nodes - within the instance. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - node_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - apply_all: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class UpdateParametersRequest(proto.Message): - r"""Request for - [UpdateParameters][google.cloud.memcache.v1.CloudMemcache.UpdateParameters]. - - Attributes: - name (str): - Required. Resource name of the Memcached - instance for which the parameters should be - updated. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - parameters (google.cloud.memcache_v1.types.MemcacheParameters): - The parameters to apply to the instance. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - parameters: "MemcacheParameters" = proto.Field( - proto.MESSAGE, - number=3, - message="MemcacheParameters", - ) - - -class MemcacheParameters(proto.Message): - r""" - - Attributes: - id (str): - Output only. The unique ID associated with - this set of parameters. Users can use this id to - determine if the parameters associated with the - instance differ from the parameters associated - with the nodes. A discrepancy between parameter - ids can inform users that they may need to take - action to apply parameters on nodes. - params (MutableMapping[str, str]): - User defined set of parameters to use in the - memcached process. - """ - - id: str = proto.Field( - proto.STRING, - number=1, - ) - params: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - - -class OperationMetadata(proto.Message): - r"""Represents the metadata of a long-running operation. - - Attributes: - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time when the operation was - created. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time when the operation finished - running. - target (str): - Output only. Server-defined resource path for - the target of the operation. - verb (str): - Output only. Name of the verb executed by the - operation. - status_detail (str): - Output only. Human-readable status of the - operation, if any. - cancel_requested (bool): - Output only. Identifies whether the user has requested - cancellation of the operation. Operations that have - successfully been cancelled have [Operation.error][] value - with a [google.rpc.Status.code][google.rpc.Status.code] of - 1, corresponding to ``Code.CANCELLED``. - api_version (str): - Output only. API version used to start the - operation. 
- """ - - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - target: str = proto.Field( - proto.STRING, - number=3, - ) - verb: str = proto.Field( - proto.STRING, - number=4, - ) - status_detail: str = proto.Field( - proto.STRING, - number=5, - ) - cancel_requested: bool = proto.Field( - proto.BOOL, - number=6, - ) - api_version: str = proto.Field( - proto.STRING, - number=7, - ) - - -class LocationMetadata(proto.Message): - r"""Metadata for the given - [google.cloud.location.Location][google.cloud.location.Location]. - - Attributes: - available_zones (MutableMapping[str, google.cloud.memcache_v1.types.ZoneMetadata]): - Output only. The set of available zones in the location. The - map is keyed by the lowercase ID of each zone, as defined by - GCE. These keys can be specified in the ``zones`` field when - creating a Memcached instance. - """ - - available_zones: MutableMapping[str, "ZoneMetadata"] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=1, - message="ZoneMetadata", - ) - - -class ZoneMetadata(proto.Message): - r""" """ - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/memcache_v1beta2/__init__.py b/google/cloud/memcache_v1beta2/__init__.py deleted file mode 100644 index 1e8afba..0000000 --- a/google/cloud/memcache_v1beta2/__init__.py +++ /dev/null @@ -1,66 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.memcache_v1beta2 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.cloud_memcache import CloudMemcacheAsyncClient, CloudMemcacheClient -from .types.cloud_memcache import ( - ApplyParametersRequest, - ApplySoftwareUpdateRequest, - CreateInstanceRequest, - DeleteInstanceRequest, - GetInstanceRequest, - Instance, - ListInstancesRequest, - ListInstancesResponse, - LocationMetadata, - MaintenancePolicy, - MaintenanceSchedule, - MemcacheParameters, - MemcacheVersion, - OperationMetadata, - RescheduleMaintenanceRequest, - UpdateInstanceRequest, - UpdateParametersRequest, - WeeklyMaintenanceWindow, - ZoneMetadata, -) - -__all__ = ( - "CloudMemcacheAsyncClient", - "ApplyParametersRequest", - "ApplySoftwareUpdateRequest", - "CloudMemcacheClient", - "CreateInstanceRequest", - "DeleteInstanceRequest", - "GetInstanceRequest", - "Instance", - "ListInstancesRequest", - "ListInstancesResponse", - "LocationMetadata", - "MaintenancePolicy", - "MaintenanceSchedule", - "MemcacheParameters", - "MemcacheVersion", - "OperationMetadata", - "RescheduleMaintenanceRequest", - "UpdateInstanceRequest", - "UpdateParametersRequest", - "WeeklyMaintenanceWindow", - "ZoneMetadata", -) diff --git a/google/cloud/memcache_v1beta2/gapic_metadata.json b/google/cloud/memcache_v1beta2/gapic_metadata.json deleted file mode 100644 index 265ae99..0000000 --- a/google/cloud/memcache_v1beta2/gapic_metadata.json +++ /dev/null @@ -1,163 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library 
clients/methods", - "language": "python", - "libraryPackage": "google.cloud.memcache_v1beta2", - "protoPackage": "google.cloud.memcache.v1beta2", - "schema": "1.0", - "services": { - "CloudMemcache": { - "clients": { - "grpc": { - "libraryClient": "CloudMemcacheClient", - "rpcs": { - "ApplyParameters": { - "methods": [ - "apply_parameters" - ] - }, - "ApplySoftwareUpdate": { - "methods": [ - "apply_software_update" - ] - }, - "CreateInstance": { - "methods": [ - "create_instance" - ] - }, - "DeleteInstance": { - "methods": [ - "delete_instance" - ] - }, - "GetInstance": { - "methods": [ - "get_instance" - ] - }, - "ListInstances": { - "methods": [ - "list_instances" - ] - }, - "RescheduleMaintenance": { - "methods": [ - "reschedule_maintenance" - ] - }, - "UpdateInstance": { - "methods": [ - "update_instance" - ] - }, - "UpdateParameters": { - "methods": [ - "update_parameters" - ] - } - } - }, - "grpc-async": { - "libraryClient": "CloudMemcacheAsyncClient", - "rpcs": { - "ApplyParameters": { - "methods": [ - "apply_parameters" - ] - }, - "ApplySoftwareUpdate": { - "methods": [ - "apply_software_update" - ] - }, - "CreateInstance": { - "methods": [ - "create_instance" - ] - }, - "DeleteInstance": { - "methods": [ - "delete_instance" - ] - }, - "GetInstance": { - "methods": [ - "get_instance" - ] - }, - "ListInstances": { - "methods": [ - "list_instances" - ] - }, - "RescheduleMaintenance": { - "methods": [ - "reschedule_maintenance" - ] - }, - "UpdateInstance": { - "methods": [ - "update_instance" - ] - }, - "UpdateParameters": { - "methods": [ - "update_parameters" - ] - } - } - }, - "rest": { - "libraryClient": "CloudMemcacheClient", - "rpcs": { - "ApplyParameters": { - "methods": [ - "apply_parameters" - ] - }, - "ApplySoftwareUpdate": { - "methods": [ - "apply_software_update" - ] - }, - "CreateInstance": { - "methods": [ - "create_instance" - ] - }, - "DeleteInstance": { - "methods": [ - "delete_instance" - ] - }, - "GetInstance": { - "methods": [ - 
"get_instance" - ] - }, - "ListInstances": { - "methods": [ - "list_instances" - ] - }, - "RescheduleMaintenance": { - "methods": [ - "reschedule_maintenance" - ] - }, - "UpdateInstance": { - "methods": [ - "update_instance" - ] - }, - "UpdateParameters": { - "methods": [ - "update_parameters" - ] - } - } - } - } - } - } -} diff --git a/google/cloud/memcache_v1beta2/gapic_version.py b/google/cloud/memcache_v1beta2/gapic_version.py deleted file mode 100644 index 84856f0..0000000 --- a/google/cloud/memcache_v1beta2/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "1.7.1" # {x-release-please-version} diff --git a/google/cloud/memcache_v1beta2/py.typed b/google/cloud/memcache_v1beta2/py.typed deleted file mode 100644 index 7959cf4..0000000 --- a/google/cloud/memcache_v1beta2/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-memcache package uses inline types. diff --git a/google/cloud/memcache_v1beta2/services/__init__.py b/google/cloud/memcache_v1beta2/services/__init__.py deleted file mode 100644 index e8e1c38..0000000 --- a/google/cloud/memcache_v1beta2/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py deleted file mode 100644 index 61c41a1..0000000 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .async_client import CloudMemcacheAsyncClient -from .client import CloudMemcacheClient - -__all__ = ( - "CloudMemcacheClient", - "CloudMemcacheAsyncClient", -) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py deleted file mode 100644 index 37f3efe..0000000 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py +++ /dev/null @@ -1,1791 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import ( - Dict, - Mapping, - MutableMapping, - MutableSequence, - Optional, - Sequence, - Tuple, - Type, - Union, -) - -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core.client_options import ClientOptions -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.memcache_v1beta2 import gapic_version as package_version - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - -from google.cloud.memcache_v1beta2.services.cloud_memcache import pagers -from google.cloud.memcache_v1beta2.types import cloud_memcache - -from .client import CloudMemcacheClient -from .transports.base import DEFAULT_CLIENT_INFO, CloudMemcacheTransport -from .transports.grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport - - -class CloudMemcacheAsyncClient: - """Configures and 
manages Cloud Memorystore for Memcached instances. - - The ``memcache.googleapis.com`` service implements the Google Cloud - Memorystore for Memcached API and defines the following resource - model for managing Memorystore Memcached (also called Memcached - below) instances: - - - The service works with a collection of cloud projects, named: - ``/projects/*`` - - Each project has a collection of available locations, named: - ``/locations/*`` - - Each location has a collection of Memcached instances, named: - ``/instances/*`` - - As such, Memcached instances are resources of the form: - ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - - Note that location_id must be a GCP ``region``; for example: - - - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached`` - """ - - _client: CloudMemcacheClient - - DEFAULT_ENDPOINT = CloudMemcacheClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = CloudMemcacheClient.DEFAULT_MTLS_ENDPOINT - - instance_path = staticmethod(CloudMemcacheClient.instance_path) - parse_instance_path = staticmethod(CloudMemcacheClient.parse_instance_path) - common_billing_account_path = staticmethod( - CloudMemcacheClient.common_billing_account_path - ) - parse_common_billing_account_path = staticmethod( - CloudMemcacheClient.parse_common_billing_account_path - ) - common_folder_path = staticmethod(CloudMemcacheClient.common_folder_path) - parse_common_folder_path = staticmethod( - CloudMemcacheClient.parse_common_folder_path - ) - common_organization_path = staticmethod( - CloudMemcacheClient.common_organization_path - ) - parse_common_organization_path = staticmethod( - CloudMemcacheClient.parse_common_organization_path - ) - common_project_path = staticmethod(CloudMemcacheClient.common_project_path) - parse_common_project_path = staticmethod( - CloudMemcacheClient.parse_common_project_path - ) - common_location_path = staticmethod(CloudMemcacheClient.common_location_path) - parse_common_location_path = 
staticmethod( - CloudMemcacheClient.parse_common_location_path - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudMemcacheAsyncClient: The constructed client. - """ - return CloudMemcacheClient.from_service_account_info.__func__(CloudMemcacheAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudMemcacheAsyncClient: The constructed client. - """ - return CloudMemcacheClient.from_service_account_file.__func__(CloudMemcacheAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source( - cls, client_options: Optional[ClientOptions] = None - ): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
- (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return CloudMemcacheClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> CloudMemcacheTransport: - """Returns the transport used by the client instance. - - Returns: - CloudMemcacheTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial( - type(CloudMemcacheClient).get_transport_class, type(CloudMemcacheClient) - ) - - def __init__( - self, - *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, CloudMemcacheTransport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the cloud memcache client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.CloudMemcacheTransport]): The - transport to use. If set to None, a transport is chosen - automatically. 
- client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = CloudMemcacheClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - ) - - async def list_instances( - self, - request: Optional[Union[cloud_memcache.ListInstancesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListInstancesAsyncPager: - r"""Lists Instances in a given location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1beta2 - - async def sample_list_instances(): - # Create a client - client = memcache_v1beta2.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - request = memcache_v1beta2.ListInstancesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instances(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.memcache_v1beta2.types.ListInstancesRequest, dict]]): - The request object. Request for - [ListInstances][google.cloud.memcache.v1beta2.CloudMemcache.ListInstances]. - parent (:class:`str`): - Required. The resource name of the instance location - using the form: - ``projects/{project_id}/locations/{location_id}`` where - ``location_id`` refers to a GCP region - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.memcache_v1beta2.services.cloud_memcache.pagers.ListInstancesAsyncPager: - Response for - [ListInstances][google.cloud.memcache.v1beta2.CloudMemcache.ListInstances]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = cloud_memcache.ListInstancesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_instances, - default_timeout=1200.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListInstancesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_instance( - self, - request: Optional[Union[cloud_memcache.GetInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloud_memcache.Instance: - r"""Gets details of a single Instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1beta2 - - async def sample_get_instance(): - # Create a client - client = memcache_v1beta2.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - request = memcache_v1beta2.GetInstanceRequest( - name="name_value", - ) - - # Make the request - response = await client.get_instance(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.memcache_v1beta2.types.GetInstanceRequest, dict]]): - The request object. Request for - [GetInstance][google.cloud.memcache.v1beta2.CloudMemcache.GetInstance]. - name (:class:`str`): - Required. Memcached instance resource name in the - format: - ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - where ``location_id`` refers to a GCP region - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.memcache_v1beta2.types.Instance: - A Memorystore for Memcached instance - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." 
- ) - - request = cloud_memcache.GetInstanceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_instance, - default_timeout=1200.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_instance( - self, - request: Optional[Union[cloud_memcache.CreateInstanceRequest, dict]] = None, - *, - parent: Optional[str] = None, - instance_id: Optional[str] = None, - resource: Optional[cloud_memcache.Instance] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a new Instance in a given location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1beta2 - - async def sample_create_instance(): - # Create a client - client = memcache_v1beta2.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - resource = memcache_v1beta2.Instance() - resource.name = "name_value" - resource.node_count = 1070 - resource.node_config.cpu_count = 976 - resource.node_config.memory_size_mb = 1505 - - request = memcache_v1beta2.CreateInstanceRequest( - parent="parent_value", - instance_id="instance_id_value", - resource=resource, - ) - - # Make the request - operation = client.create_instance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.memcache_v1beta2.types.CreateInstanceRequest, dict]]): - The request object. Request for - [CreateInstance][google.cloud.memcache.v1beta2.CloudMemcache.CreateInstance]. - parent (:class:`str`): - Required. The resource name of the instance location - using the form: - ``projects/{project_id}/locations/{location_id}`` where - ``location_id`` refers to a GCP region - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - instance_id (:class:`str`): - Required. The logical name of the Memcached instance in - the user project with the following restrictions: - - - Must contain only lowercase letters, numbers, and - hyphens. - - Must start with a letter. - - Must be between 1-40 characters. - - Must end with a number or a letter. - - Must be unique within the user project / location. - - If any of the above are not met, the API raises an - invalid argument error. 
- - This corresponds to the ``instance_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - resource (:class:`google.cloud.memcache_v1beta2.types.Instance`): - Required. A Memcached [Instance] resource - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.memcache_v1beta2.types.Instance` A - Memorystore for Memcached instance - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, instance_id, resource]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = cloud_memcache.CreateInstanceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if instance_id is not None: - request.instance_id = instance_id - if resource is not None: - request.resource = resource - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_instance, - default_timeout=1200.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - cloud_memcache.Instance, - metadata_type=cloud_memcache.OperationMetadata, - ) - - # Done; return the response. - return response - - async def update_instance( - self, - request: Optional[Union[cloud_memcache.UpdateInstanceRequest, dict]] = None, - *, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - resource: Optional[cloud_memcache.Instance] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates an existing Instance in a given project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1beta2 - - async def sample_update_instance(): - # Create a client - client = memcache_v1beta2.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - resource = memcache_v1beta2.Instance() - resource.name = "name_value" - resource.node_count = 1070 - resource.node_config.cpu_count = 976 - resource.node_config.memory_size_mb = 1505 - - request = memcache_v1beta2.UpdateInstanceRequest( - resource=resource, - ) - - # Make the request - operation = client.update_instance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.memcache_v1beta2.types.UpdateInstanceRequest, dict]]): - The request object. Request for - [UpdateInstance][google.cloud.memcache.v1beta2.CloudMemcache.UpdateInstance]. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. - - - ``displayName`` - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - resource (:class:`google.cloud.memcache_v1beta2.types.Instance`): - Required. A Memcached [Instance] resource. Only fields - specified in update_mask are updated. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. 
- - The result type for the operation will be - :class:`google.cloud.memcache_v1beta2.types.Instance` A - Memorystore for Memcached instance - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([update_mask, resource]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = cloud_memcache.UpdateInstanceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if update_mask is not None: - request.update_mask = update_mask - if resource is not None: - request.resource = resource - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_instance, - default_timeout=1200.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource.name", request.resource.name),) - ), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - cloud_memcache.Instance, - metadata_type=cloud_memcache.OperationMetadata, - ) - - # Done; return the response. 
- return response - - async def update_parameters( - self, - request: Optional[Union[cloud_memcache.UpdateParametersRequest, dict]] = None, - *, - name: Optional[str] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - parameters: Optional[cloud_memcache.MemcacheParameters] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates the defined Memcached parameters for an existing - instance. This method only stages the parameters, it must be - followed by ``ApplyParameters`` to apply the parameters to nodes - of the Memcached instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1beta2 - - async def sample_update_parameters(): - # Create a client - client = memcache_v1beta2.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - request = memcache_v1beta2.UpdateParametersRequest( - name="name_value", - ) - - # Make the request - operation = client.update_parameters(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.memcache_v1beta2.types.UpdateParametersRequest, dict]]): - The request object. Request for - [UpdateParameters][google.cloud.memcache.v1beta2.CloudMemcache.UpdateParameters]. - name (:class:`str`): - Required. Resource name of the - Memcached instance for which the - parameters should be updated. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - parameters (:class:`google.cloud.memcache_v1beta2.types.MemcacheParameters`): - The parameters to apply to the - instance. - - This corresponds to the ``parameters`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.memcache_v1beta2.types.Instance` A - Memorystore for Memcached instance - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, update_mask, parameters]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = cloud_memcache.UpdateParametersRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if update_mask is not None: - request.update_mask = update_mask - if parameters is not None: - request.parameters = parameters - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_parameters, - default_timeout=1200.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - cloud_memcache.Instance, - metadata_type=cloud_memcache.OperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_instance( - self, - request: Optional[Union[cloud_memcache.DeleteInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a single Instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1beta2 - - async def sample_delete_instance(): - # Create a client - client = memcache_v1beta2.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - request = memcache_v1beta2.DeleteInstanceRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_instance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.memcache_v1beta2.types.DeleteInstanceRequest, dict]]): - The request object. Request for - [DeleteInstance][google.cloud.memcache.v1beta2.CloudMemcache.DeleteInstance]. - name (:class:`str`): - Required. Memcached instance resource name in the - format: - ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - where ``location_id`` refers to a GCP region - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. 
For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = cloud_memcache.DeleteInstanceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_instance, - default_timeout=1200.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=cloud_memcache.OperationMetadata, - ) - - # Done; return the response. 
- return response - - async def apply_parameters( - self, - request: Optional[Union[cloud_memcache.ApplyParametersRequest, dict]] = None, - *, - name: Optional[str] = None, - node_ids: Optional[MutableSequence[str]] = None, - apply_all: Optional[bool] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""``ApplyParameters`` restarts the set of specified nodes in order - to update them to the current set of parameters for the - Memcached Instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1beta2 - - async def sample_apply_parameters(): - # Create a client - client = memcache_v1beta2.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - request = memcache_v1beta2.ApplyParametersRequest( - name="name_value", - ) - - # Make the request - operation = client.apply_parameters(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.memcache_v1beta2.types.ApplyParametersRequest, dict]]): - The request object. Request for - [ApplyParameters][google.cloud.memcache.v1beta2.CloudMemcache.ApplyParameters]. - name (:class:`str`): - Required. Resource name of the - Memcached instance for which parameter - group updates should be applied. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- node_ids (:class:`MutableSequence[str]`): - Nodes to which the instance-level - parameter group is applied. - - This corresponds to the ``node_ids`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - apply_all (:class:`bool`): - Whether to apply instance-level parameter group to all - nodes. If set to true, users are restricted from - specifying individual nodes, and ``ApplyParameters`` - updates all nodes within the instance. - - This corresponds to the ``apply_all`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.memcache_v1beta2.types.Instance` A - Memorystore for Memcached instance - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, node_ids, apply_all]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = cloud_memcache.ApplyParametersRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if apply_all is not None: - request.apply_all = apply_all - if node_ids: - request.node_ids.extend(node_ids) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.apply_parameters, - default_timeout=1200.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - cloud_memcache.Instance, - metadata_type=cloud_memcache.OperationMetadata, - ) - - # Done; return the response. - return response - - async def apply_software_update( - self, - request: Optional[ - Union[cloud_memcache.ApplySoftwareUpdateRequest, dict] - ] = None, - *, - instance: Optional[str] = None, - node_ids: Optional[MutableSequence[str]] = None, - apply_all: Optional[bool] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates software on the selected nodes of the - Instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1beta2 - - async def sample_apply_software_update(): - # Create a client - client = memcache_v1beta2.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - request = memcache_v1beta2.ApplySoftwareUpdateRequest( - instance="instance_value", - ) - - # Make the request - operation = client.apply_software_update(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.memcache_v1beta2.types.ApplySoftwareUpdateRequest, dict]]): - The request object. Request for - [ApplySoftwareUpdate][google.cloud.memcache.v1beta2.CloudMemcache.ApplySoftwareUpdate]. - instance (:class:`str`): - Required. Resource name of the - Memcached instance for which software - update should be applied. - - This corresponds to the ``instance`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - node_ids (:class:`MutableSequence[str]`): - Nodes to which we should apply the - update to. Note all the selected nodes - are updated in parallel. - - This corresponds to the ``node_ids`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - apply_all (:class:`bool`): - Whether to apply the update to all - nodes. If set to true, will explicitly - restrict users from specifying any - nodes, and apply software update to all - nodes (where applicable) within the - instance. - - This corresponds to the ``apply_all`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.memcache_v1beta2.types.Instance` A - Memorystore for Memcached instance - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([instance, node_ids, apply_all]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = cloud_memcache.ApplySoftwareUpdateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if instance is not None: - request.instance = instance - if apply_all is not None: - request.apply_all = apply_all - if node_ids: - request.node_ids.extend(node_ids) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.apply_software_update, - default_timeout=1200.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("instance", request.instance),)), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - cloud_memcache.Instance, - metadata_type=cloud_memcache.OperationMetadata, - ) - - # Done; return the response. 
- return response - - async def reschedule_maintenance( - self, - request: Optional[ - Union[cloud_memcache.RescheduleMaintenanceRequest, dict] - ] = None, - *, - instance: Optional[str] = None, - reschedule_type: Optional[ - cloud_memcache.RescheduleMaintenanceRequest.RescheduleType - ] = None, - schedule_time: Optional[timestamp_pb2.Timestamp] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Performs the apply phase of the RescheduleMaintenance - verb. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1beta2 - - async def sample_reschedule_maintenance(): - # Create a client - client = memcache_v1beta2.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - request = memcache_v1beta2.RescheduleMaintenanceRequest( - instance="instance_value", - reschedule_type="SPECIFIC_TIME", - ) - - # Make the request - operation = client.reschedule_maintenance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.memcache_v1beta2.types.RescheduleMaintenanceRequest, dict]]): - The request object. Request for - [RescheduleMaintenance][google.cloud.memcache.v1beta2.CloudMemcache.RescheduleMaintenance]. - instance (:class:`str`): - Required. 
Memcache instance resource name using the - form: - ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - where ``location_id`` refers to a GCP region. - - This corresponds to the ``instance`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - reschedule_type (:class:`google.cloud.memcache_v1beta2.types.RescheduleMaintenanceRequest.RescheduleType`): - Required. If reschedule type is SPECIFIC_TIME, must set - up schedule_time as well. - - This corresponds to the ``reschedule_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - schedule_time (:class:`google.protobuf.timestamp_pb2.Timestamp`): - Timestamp when the maintenance shall be rescheduled to - if reschedule_type=SPECIFIC_TIME, in RFC 3339 format, - for example ``2012-11-15T16:19:00.094Z``. - - This corresponds to the ``schedule_time`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.memcache_v1beta2.types.Instance` A - Memorystore for Memcached instance - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([instance, reschedule_type, schedule_time]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." 
- ) - - request = cloud_memcache.RescheduleMaintenanceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if instance is not None: - request.instance = instance - if reschedule_type is not None: - request.reschedule_type = reschedule_type - if schedule_time is not None: - request.schedule_time = schedule_time - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.reschedule_maintenance, - default_timeout=1200.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("instance", request.instance),)), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - cloud_memcache.Instance, - metadata_type=cloud_memcache.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. 
- # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.get_location, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.list_locations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=package_version.__version__ -) - - -__all__ = ("CloudMemcacheAsyncClient",) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py deleted file mode 100644 index 6f877bd..0000000 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ /dev/null @@ -1,2025 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import os -import re -from typing import ( - Dict, - Mapping, - MutableMapping, - MutableSequence, - Optional, - Sequence, - Tuple, - Type, - Union, - cast, -) - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.memcache_v1beta2 import gapic_version as package_version - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - -from google.cloud.memcache_v1beta2.services.cloud_memcache import pagers -from google.cloud.memcache_v1beta2.types import cloud_memcache - -from .transports.base import DEFAULT_CLIENT_INFO, CloudMemcacheTransport -from .transports.grpc import CloudMemcacheGrpcTransport -from .transports.grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport -from .transports.rest import CloudMemcacheRestTransport - - -class CloudMemcacheClientMeta(type): - """Metaclass for the CloudMemcache client. - - This provides class-level methods for building and retrieving - support objects (e.g. 
transport) without polluting the client instance - objects. - """ - - _transport_registry = OrderedDict() # type: Dict[str, Type[CloudMemcacheTransport]] - _transport_registry["grpc"] = CloudMemcacheGrpcTransport - _transport_registry["grpc_asyncio"] = CloudMemcacheGrpcAsyncIOTransport - _transport_registry["rest"] = CloudMemcacheRestTransport - - def get_transport_class( - cls, - label: Optional[str] = None, - ) -> Type[CloudMemcacheTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class CloudMemcacheClient(metaclass=CloudMemcacheClientMeta): - """Configures and manages Cloud Memorystore for Memcached instances. - - The ``memcache.googleapis.com`` service implements the Google Cloud - Memorystore for Memcached API and defines the following resource - model for managing Memorystore Memcached (also called Memcached - below) instances: - - - The service works with a collection of cloud projects, named: - ``/projects/*`` - - Each project has a collection of available locations, named: - ``/locations/*`` - - Each location has a collection of Memcached instances, named: - ``/instances/*`` - - As such, Memcached instances are resources of the form: - ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - - Note that location_id must be a GCP ``region``; for example: - - - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached`` - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. 
- - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "memcache.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudMemcacheClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudMemcacheClient: The constructed client. 
- """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> CloudMemcacheTransport: - """Returns the transport used by the client instance. - - Returns: - CloudMemcacheTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def instance_path( - project: str, - location: str, - instance: str, - ) -> str: - """Returns a fully-qualified instance string.""" - return "projects/{project}/locations/{location}/instances/{instance}".format( - project=project, - location=location, - instance=instance, - ) - - @staticmethod - def parse_instance_path(path: str) -> Dict[str, str]: - """Parses a instance path into its component segments.""" - m = re.match( - r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$", - path, - ) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path( - billing_account: str, - ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str, str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path( - folder: str, - ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format( - folder=folder, - ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str, str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path( - organization: str, - ) -> str: - """Returns a fully-qualified 
organization string.""" - return "organizations/{organization}".format( - organization=organization, - ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str, str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path( - project: str, - ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format( - project=project, - ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str, str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path( - project: str, - location: str, - ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str, str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source( - cls, client_options: Optional[client_options_lib.ClientOptions] = None - ): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
- (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError( - "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - ) - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. 
- if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or ( - use_mtls_endpoint == "auto" and client_cert_source - ): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__( - self, - *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, CloudMemcacheTransport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the cloud memcache client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, CloudMemcacheTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. 
If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( - client_options - ) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError( - "client_options.api_key and credentials are mutually exclusive" - ) - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, CloudMemcacheTransport): - # transport is a CloudMemcacheTransport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError( - "When providing a transport instance, " - "provide its credentials directly." - ) - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr( - google.auth._default, "get_api_key_credentials" - ): - credentials = google.auth._default.get_api_key_credentials( - api_key_value - ) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def list_instances( - self, - request: Optional[Union[cloud_memcache.ListInstancesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListInstancesPager: - r"""Lists Instances in a given location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1beta2 - - def sample_list_instances(): - # Create a client - client = memcache_v1beta2.CloudMemcacheClient() - - # Initialize request argument(s) - request = memcache_v1beta2.ListInstancesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instances(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.memcache_v1beta2.types.ListInstancesRequest, dict]): - The request object. Request for - [ListInstances][google.cloud.memcache.v1beta2.CloudMemcache.ListInstances]. - parent (str): - Required. The resource name of the instance location - using the form: - ``projects/{project_id}/locations/{location_id}`` where - ``location_id`` refers to a GCP region - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.memcache_v1beta2.services.cloud_memcache.pagers.ListInstancesPager: - Response for - [ListInstances][google.cloud.memcache.v1beta2.CloudMemcache.ListInstances]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # Minor optimization to avoid making a copy if the user passes - # in a cloud_memcache.ListInstancesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloud_memcache.ListInstancesRequest): - request = cloud_memcache.ListInstancesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_instances] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListInstancesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_instance( - self, - request: Optional[Union[cloud_memcache.GetInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloud_memcache.Instance: - r"""Gets details of a single Instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1beta2 - - def sample_get_instance(): - # Create a client - client = memcache_v1beta2.CloudMemcacheClient() - - # Initialize request argument(s) - request = memcache_v1beta2.GetInstanceRequest( - name="name_value", - ) - - # Make the request - response = client.get_instance(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.memcache_v1beta2.types.GetInstanceRequest, dict]): - The request object. Request for - [GetInstance][google.cloud.memcache.v1beta2.CloudMemcache.GetInstance]. - name (str): - Required. Memcached instance resource name in the - format: - ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - where ``location_id`` refers to a GCP region - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.memcache_v1beta2.types.Instance: - A Memorystore for Memcached instance - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." 
- ) - - # Minor optimization to avoid making a copy if the user passes - # in a cloud_memcache.GetInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloud_memcache.GetInstanceRequest): - request = cloud_memcache.GetInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_instance( - self, - request: Optional[Union[cloud_memcache.CreateInstanceRequest, dict]] = None, - *, - parent: Optional[str] = None, - instance_id: Optional[str] = None, - resource: Optional[cloud_memcache.Instance] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates a new Instance in a given location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1beta2 - - def sample_create_instance(): - # Create a client - client = memcache_v1beta2.CloudMemcacheClient() - - # Initialize request argument(s) - resource = memcache_v1beta2.Instance() - resource.name = "name_value" - resource.node_count = 1070 - resource.node_config.cpu_count = 976 - resource.node_config.memory_size_mb = 1505 - - request = memcache_v1beta2.CreateInstanceRequest( - parent="parent_value", - instance_id="instance_id_value", - resource=resource, - ) - - # Make the request - operation = client.create_instance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.memcache_v1beta2.types.CreateInstanceRequest, dict]): - The request object. Request for - [CreateInstance][google.cloud.memcache.v1beta2.CloudMemcache.CreateInstance]. - parent (str): - Required. The resource name of the instance location - using the form: - ``projects/{project_id}/locations/{location_id}`` where - ``location_id`` refers to a GCP region - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - instance_id (str): - Required. The logical name of the Memcached instance in - the user project with the following restrictions: - - - Must contain only lowercase letters, numbers, and - hyphens. - - Must start with a letter. - - Must be between 1-40 characters. - - Must end with a number or a letter. - - Must be unique within the user project / location. - - If any of the above are not met, the API raises an - invalid argument error. - - This corresponds to the ``instance_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- resource (google.cloud.memcache_v1beta2.types.Instance): - Required. A Memcached [Instance] resource - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.memcache_v1beta2.types.Instance` A - Memorystore for Memcached instance - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, instance_id, resource]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # Minor optimization to avoid making a copy if the user passes - # in a cloud_memcache.CreateInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloud_memcache.CreateInstanceRequest): - request = cloud_memcache.CreateInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if instance_id is not None: - request.instance_id = instance_id - if resource is not None: - request.resource = resource - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.create_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - cloud_memcache.Instance, - metadata_type=cloud_memcache.OperationMetadata, - ) - - # Done; return the response. - return response - - def update_instance( - self, - request: Optional[Union[cloud_memcache.UpdateInstanceRequest, dict]] = None, - *, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - resource: Optional[cloud_memcache.Instance] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Updates an existing Instance in a given project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1beta2 - - def sample_update_instance(): - # Create a client - client = memcache_v1beta2.CloudMemcacheClient() - - # Initialize request argument(s) - resource = memcache_v1beta2.Instance() - resource.name = "name_value" - resource.node_count = 1070 - resource.node_config.cpu_count = 976 - resource.node_config.memory_size_mb = 1505 - - request = memcache_v1beta2.UpdateInstanceRequest( - resource=resource, - ) - - # Make the request - operation = client.update_instance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.memcache_v1beta2.types.UpdateInstanceRequest, dict]): - The request object. Request for - [UpdateInstance][google.cloud.memcache.v1beta2.CloudMemcache.UpdateInstance]. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - - - ``displayName`` - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - resource (google.cloud.memcache_v1beta2.types.Instance): - Required. A Memcached [Instance] resource. Only fields - specified in update_mask are updated. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. 
- - The result type for the operation will be - :class:`google.cloud.memcache_v1beta2.types.Instance` A - Memorystore for Memcached instance - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([update_mask, resource]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # Minor optimization to avoid making a copy if the user passes - # in a cloud_memcache.UpdateInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloud_memcache.UpdateInstanceRequest): - request = cloud_memcache.UpdateInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if update_mask is not None: - request.update_mask = update_mask - if resource is not None: - request.resource = resource - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource.name", request.resource.name),) - ), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - cloud_memcache.Instance, - metadata_type=cloud_memcache.OperationMetadata, - ) - - # Done; return the response. 
- return response - - def update_parameters( - self, - request: Optional[Union[cloud_memcache.UpdateParametersRequest, dict]] = None, - *, - name: Optional[str] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - parameters: Optional[cloud_memcache.MemcacheParameters] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Updates the defined Memcached parameters for an existing - instance. This method only stages the parameters, it must be - followed by ``ApplyParameters`` to apply the parameters to nodes - of the Memcached instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1beta2 - - def sample_update_parameters(): - # Create a client - client = memcache_v1beta2.CloudMemcacheClient() - - # Initialize request argument(s) - request = memcache_v1beta2.UpdateParametersRequest( - name="name_value", - ) - - # Make the request - operation = client.update_parameters(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.memcache_v1beta2.types.UpdateParametersRequest, dict]): - The request object. Request for - [UpdateParameters][google.cloud.memcache.v1beta2.CloudMemcache.UpdateParameters]. - name (str): - Required. Resource name of the - Memcached instance for which the - parameters should be updated. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - parameters (google.cloud.memcache_v1beta2.types.MemcacheParameters): - The parameters to apply to the - instance. - - This corresponds to the ``parameters`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.memcache_v1beta2.types.Instance` A - Memorystore for Memcached instance - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, update_mask, parameters]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # Minor optimization to avoid making a copy if the user passes - # in a cloud_memcache.UpdateParametersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloud_memcache.UpdateParametersRequest): - request = cloud_memcache.UpdateParametersRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - if update_mask is not None: - request.update_mask = update_mask - if parameters is not None: - request.parameters = parameters - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_parameters] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - cloud_memcache.Instance, - metadata_type=cloud_memcache.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_instance( - self, - request: Optional[Union[cloud_memcache.DeleteInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Deletes a single Instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1beta2 - - def sample_delete_instance(): - # Create a client - client = memcache_v1beta2.CloudMemcacheClient() - - # Initialize request argument(s) - request = memcache_v1beta2.DeleteInstanceRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_instance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.memcache_v1beta2.types.DeleteInstanceRequest, dict]): - The request object. Request for - [DeleteInstance][google.cloud.memcache.v1beta2.CloudMemcache.DeleteInstance]. - name (str): - Required. Memcached instance resource name in the - format: - ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - where ``location_id`` refers to a GCP region - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # Minor optimization to avoid making a copy if the user passes - # in a cloud_memcache.DeleteInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloud_memcache.DeleteInstanceRequest): - request = cloud_memcache.DeleteInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=cloud_memcache.OperationMetadata, - ) - - # Done; return the response. 
- return response - - def apply_parameters( - self, - request: Optional[Union[cloud_memcache.ApplyParametersRequest, dict]] = None, - *, - name: Optional[str] = None, - node_ids: Optional[MutableSequence[str]] = None, - apply_all: Optional[bool] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""``ApplyParameters`` restarts the set of specified nodes in order - to update them to the current set of parameters for the - Memcached Instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1beta2 - - def sample_apply_parameters(): - # Create a client - client = memcache_v1beta2.CloudMemcacheClient() - - # Initialize request argument(s) - request = memcache_v1beta2.ApplyParametersRequest( - name="name_value", - ) - - # Make the request - operation = client.apply_parameters(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.memcache_v1beta2.types.ApplyParametersRequest, dict]): - The request object. Request for - [ApplyParameters][google.cloud.memcache.v1beta2.CloudMemcache.ApplyParameters]. - name (str): - Required. Resource name of the - Memcached instance for which parameter - group updates should be applied. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- node_ids (MutableSequence[str]): - Nodes to which the instance-level - parameter group is applied. - - This corresponds to the ``node_ids`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - apply_all (bool): - Whether to apply instance-level parameter group to all - nodes. If set to true, users are restricted from - specifying individual nodes, and ``ApplyParameters`` - updates all nodes within the instance. - - This corresponds to the ``apply_all`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.memcache_v1beta2.types.Instance` A - Memorystore for Memcached instance - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, node_ids, apply_all]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # Minor optimization to avoid making a copy if the user passes - # in a cloud_memcache.ApplyParametersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloud_memcache.ApplyParametersRequest): - request = cloud_memcache.ApplyParametersRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - if node_ids is not None: - request.node_ids = node_ids - if apply_all is not None: - request.apply_all = apply_all - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.apply_parameters] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - cloud_memcache.Instance, - metadata_type=cloud_memcache.OperationMetadata, - ) - - # Done; return the response. - return response - - def apply_software_update( - self, - request: Optional[ - Union[cloud_memcache.ApplySoftwareUpdateRequest, dict] - ] = None, - *, - instance: Optional[str] = None, - node_ids: Optional[MutableSequence[str]] = None, - apply_all: Optional[bool] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Updates software on the selected nodes of the - Instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1beta2 - - def sample_apply_software_update(): - # Create a client - client = memcache_v1beta2.CloudMemcacheClient() - - # Initialize request argument(s) - request = memcache_v1beta2.ApplySoftwareUpdateRequest( - instance="instance_value", - ) - - # Make the request - operation = client.apply_software_update(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.memcache_v1beta2.types.ApplySoftwareUpdateRequest, dict]): - The request object. Request for - [ApplySoftwareUpdate][google.cloud.memcache.v1beta2.CloudMemcache.ApplySoftwareUpdate]. - instance (str): - Required. Resource name of the - Memcached instance for which software - update should be applied. - - This corresponds to the ``instance`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - node_ids (MutableSequence[str]): - Nodes to which we should apply the - update to. Note all the selected nodes - are updated in parallel. - - This corresponds to the ``node_ids`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - apply_all (bool): - Whether to apply the update to all - nodes. If set to true, will explicitly - restrict users from specifying any - nodes, and apply software update to all - nodes (where applicable) within the - instance. - - This corresponds to the ``apply_all`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.memcache_v1beta2.types.Instance` A - Memorystore for Memcached instance - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([instance, node_ids, apply_all]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # Minor optimization to avoid making a copy if the user passes - # in a cloud_memcache.ApplySoftwareUpdateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloud_memcache.ApplySoftwareUpdateRequest): - request = cloud_memcache.ApplySoftwareUpdateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if instance is not None: - request.instance = instance - if node_ids is not None: - request.node_ids = node_ids - if apply_all is not None: - request.apply_all = apply_all - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.apply_software_update] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("instance", request.instance),)), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
- response = operation.from_gapic( - response, - self._transport.operations_client, - cloud_memcache.Instance, - metadata_type=cloud_memcache.OperationMetadata, - ) - - # Done; return the response. - return response - - def reschedule_maintenance( - self, - request: Optional[ - Union[cloud_memcache.RescheduleMaintenanceRequest, dict] - ] = None, - *, - instance: Optional[str] = None, - reschedule_type: Optional[ - cloud_memcache.RescheduleMaintenanceRequest.RescheduleType - ] = None, - schedule_time: Optional[timestamp_pb2.Timestamp] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Performs the apply phase of the RescheduleMaintenance - verb. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import memcache_v1beta2 - - def sample_reschedule_maintenance(): - # Create a client - client = memcache_v1beta2.CloudMemcacheClient() - - # Initialize request argument(s) - request = memcache_v1beta2.RescheduleMaintenanceRequest( - instance="instance_value", - reschedule_type="SPECIFIC_TIME", - ) - - # Make the request - operation = client.reschedule_maintenance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.memcache_v1beta2.types.RescheduleMaintenanceRequest, dict]): - The request object. Request for - [RescheduleMaintenance][google.cloud.memcache.v1beta2.CloudMemcache.RescheduleMaintenance]. 
- instance (str): - Required. Memcache instance resource name using the - form: - ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - where ``location_id`` refers to a GCP region. - - This corresponds to the ``instance`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - reschedule_type (google.cloud.memcache_v1beta2.types.RescheduleMaintenanceRequest.RescheduleType): - Required. If reschedule type is SPECIFIC_TIME, must set - up schedule_time as well. - - This corresponds to the ``reschedule_type`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - schedule_time (google.protobuf.timestamp_pb2.Timestamp): - Timestamp when the maintenance shall be rescheduled to - if reschedule_type=SPECIFIC_TIME, in RFC 3339 format, - for example ``2012-11-15T16:19:00.094Z``. - - This corresponds to the ``schedule_time`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.memcache_v1beta2.types.Instance` A - Memorystore for Memcached instance - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([instance, reschedule_type, schedule_time]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." 
- ) - - # Minor optimization to avoid making a copy if the user passes - # in a cloud_memcache.RescheduleMaintenanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloud_memcache.RescheduleMaintenanceRequest): - request = cloud_memcache.RescheduleMaintenanceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if instance is not None: - request.instance = instance - if reschedule_type is not None: - request.reschedule_type = reschedule_type - if schedule_time is not None: - request.schedule_time = schedule_time - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.reschedule_maintenance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("instance", request.instance),)), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - cloud_memcache.Instance, - metadata_type=cloud_memcache.OperationMetadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "CloudMemcacheClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! 
- """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. 
- - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. 
- - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.Location: - Location object. 
- """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_location, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_locations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=package_version.__version__ -) - - -__all__ = ("CloudMemcacheClient",) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py deleted file mode 100644 index 3bb67cb..0000000 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py +++ /dev/null @@ -1,155 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from typing import ( - Any, - AsyncIterator, - Awaitable, - Callable, - Iterator, - Optional, - Sequence, - Tuple, -) - -from google.cloud.memcache_v1beta2.types import cloud_memcache - - -class ListInstancesPager: - """A pager for iterating through ``list_instances`` requests. - - This class thinly wraps an initial - :class:`google.cloud.memcache_v1beta2.types.ListInstancesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``resources`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListInstances`` requests and continue to iterate - through the ``resources`` field on the - corresponding responses. - - All the usual :class:`google.cloud.memcache_v1beta2.types.ListInstancesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., cloud_memcache.ListInstancesResponse], - request: cloud_memcache.ListInstancesRequest, - response: cloud_memcache.ListInstancesResponse, - *, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.memcache_v1beta2.types.ListInstancesRequest): - The initial request object. - response (google.cloud.memcache_v1beta2.types.ListInstancesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = cloud_memcache.ListInstancesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[cloud_memcache.ListInstancesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[cloud_memcache.Instance]: - for page in self.pages: - yield from page.resources - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - -class ListInstancesAsyncPager: - """A pager for iterating through ``list_instances`` requests. - - This class thinly wraps an initial - :class:`google.cloud.memcache_v1beta2.types.ListInstancesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``resources`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListInstances`` requests and continue to iterate - through the ``resources`` field on the - corresponding responses. - - All the usual :class:`google.cloud.memcache_v1beta2.types.ListInstancesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., Awaitable[cloud_memcache.ListInstancesResponse]], - request: cloud_memcache.ListInstancesRequest, - response: cloud_memcache.ListInstancesResponse, - *, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.memcache_v1beta2.types.ListInstancesRequest): - The initial request object. 
- response (google.cloud.memcache_v1beta2.types.ListInstancesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = cloud_memcache.ListInstancesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[cloud_memcache.ListInstancesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterator[cloud_memcache.Instance]: - async def async_generator(): - async for page in self.pages: - for response in page.resources: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py deleted file mode 100644 index 8e6a821..0000000 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/__init__.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import CloudMemcacheTransport -from .grpc import CloudMemcacheGrpcTransport -from .grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport -from .rest import CloudMemcacheRestInterceptor, CloudMemcacheRestTransport - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[CloudMemcacheTransport]] -_transport_registry["grpc"] = CloudMemcacheGrpcTransport -_transport_registry["grpc_asyncio"] = CloudMemcacheGrpcAsyncIOTransport -_transport_registry["rest"] = CloudMemcacheRestTransport - -__all__ = ( - "CloudMemcacheTransport", - "CloudMemcacheGrpcTransport", - "CloudMemcacheGrpcAsyncIOTransport", - "CloudMemcacheRestTransport", - "CloudMemcacheRestInterceptor", -) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py deleted file mode 100644 index 828089f..0000000 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py +++ /dev/null @@ -1,331 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1, operations_v1 -from google.api_core import retry as retries -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.memcache_v1beta2 import gapic_version as package_version -from google.cloud.memcache_v1beta2.types import cloud_memcache - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=package_version.__version__ -) - - -class CloudMemcacheTransport(abc.ABC): - """Abstract transport class for CloudMemcache.""" - - AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - - DEFAULT_HOST: str = "memcache.googleapis.com" - - def __init__( - self, - *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. 
- This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs( - "'credentials_file' and 'credentials' are mutually exclusive" - ) - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, **scopes_kwargs, quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default( - **scopes_kwargs, quota_project_id=quota_project_id - ) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience( - api_audience if api_audience else host - ) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if ( - always_use_jwt_access - and isinstance(credentials, service_account.Credentials) - and hasattr(service_account.Credentials, "with_always_use_jwt_access") - ): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
- if ":" not in host: - host += ":443" - self._host = host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.list_instances: gapic_v1.method.wrap_method( - self.list_instances, - default_timeout=1200.0, - client_info=client_info, - ), - self.get_instance: gapic_v1.method.wrap_method( - self.get_instance, - default_timeout=1200.0, - client_info=client_info, - ), - self.create_instance: gapic_v1.method.wrap_method( - self.create_instance, - default_timeout=1200.0, - client_info=client_info, - ), - self.update_instance: gapic_v1.method.wrap_method( - self.update_instance, - default_timeout=1200.0, - client_info=client_info, - ), - self.update_parameters: gapic_v1.method.wrap_method( - self.update_parameters, - default_timeout=1200.0, - client_info=client_info, - ), - self.delete_instance: gapic_v1.method.wrap_method( - self.delete_instance, - default_timeout=1200.0, - client_info=client_info, - ), - self.apply_parameters: gapic_v1.method.wrap_method( - self.apply_parameters, - default_timeout=1200.0, - client_info=client_info, - ), - self.apply_software_update: gapic_v1.method.wrap_method( - self.apply_software_update, - default_timeout=1200.0, - client_info=client_info, - ), - self.reschedule_maintenance: gapic_v1.method.wrap_method( - self.reschedule_maintenance, - default_timeout=1200.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def list_instances( - self, - ) -> Callable[ - [cloud_memcache.ListInstancesRequest], - Union[ - cloud_memcache.ListInstancesResponse, - Awaitable[cloud_memcache.ListInstancesResponse], - ], - ]: - raise NotImplementedError() - - @property - def get_instance( - self, - ) -> Callable[ - [cloud_memcache.GetInstanceRequest], - Union[cloud_memcache.Instance, Awaitable[cloud_memcache.Instance]], - ]: - raise NotImplementedError() - - @property - def create_instance( - self, - ) -> Callable[ - [cloud_memcache.CreateInstanceRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def update_instance( - self, - ) -> Callable[ - [cloud_memcache.UpdateInstanceRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def update_parameters( - self, - ) -> Callable[ - [cloud_memcache.UpdateParametersRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def delete_instance( - self, - ) -> Callable[ - [cloud_memcache.DeleteInstanceRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def apply_parameters( - self, - ) -> Callable[ - [cloud_memcache.ApplyParametersRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def apply_software_update( - self, - ) -> Callable[ - [cloud_memcache.ApplySoftwareUpdateRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def reschedule_maintenance( - self, - ) -> Callable[ - 
[cloud_memcache.RescheduleMaintenanceRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[ - operations_pb2.ListOperationsResponse, - Awaitable[operations_pb2.ListOperationsResponse], - ], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: - raise NotImplementedError() - - @property - def get_location( - self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: - raise NotImplementedError() - - @property - def list_locations( - self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[ - locations_pb2.ListLocationsResponse, - Awaitable[locations_pb2.ListLocationsResponse], - ], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ("CloudMemcacheTransport",) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py deleted file mode 100644 index 5035a2e..0000000 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py +++ /dev/null @@ -1,628 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Callable, Dict, Optional, Sequence, Tuple, Union -import warnings - -from google.api_core import gapic_v1, grpc_helpers, operations_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -import grpc # type: ignore - -from google.cloud.memcache_v1beta2.types import cloud_memcache - -from .base import DEFAULT_CLIENT_INFO, CloudMemcacheTransport - - -class CloudMemcacheGrpcTransport(CloudMemcacheTransport): - """gRPC backend transport for CloudMemcache. - - Configures and manages Cloud Memorystore for Memcached instances. 
- - The ``memcache.googleapis.com`` service implements the Google Cloud - Memorystore for Memcached API and defines the following resource - model for managing Memorystore Memcached (also called Memcached - below) instances: - - - The service works with a collection of cloud projects, named: - ``/projects/*`` - - Each project has a collection of available locations, named: - ``/locations/*`` - - Each location has a collection of Memcached instances, named: - ``/instances/*`` - - As such, Memcached instances are resources of the form: - ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - - Note that location_id must be a GCP ``region``; for example: - - - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached`` - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _stubs: Dict[str, Callable] - - def __init__( - self, - *, - host: str = "memcache.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. 
- credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel( - cls, - host: str = "memcache.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs, - ) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. 
If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs, - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service.""" - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient(self.grpc_channel) - - # Return the client from cache. - return self._operations_client - - @property - def list_instances( - self, - ) -> Callable[ - [cloud_memcache.ListInstancesRequest], cloud_memcache.ListInstancesResponse - ]: - r"""Return a callable for the list instances method over gRPC. - - Lists Instances in a given location. 
- - Returns: - Callable[[~.ListInstancesRequest], - ~.ListInstancesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_instances" not in self._stubs: - self._stubs["list_instances"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1beta2.CloudMemcache/ListInstances", - request_serializer=cloud_memcache.ListInstancesRequest.serialize, - response_deserializer=cloud_memcache.ListInstancesResponse.deserialize, - ) - return self._stubs["list_instances"] - - @property - def get_instance( - self, - ) -> Callable[[cloud_memcache.GetInstanceRequest], cloud_memcache.Instance]: - r"""Return a callable for the get instance method over gRPC. - - Gets details of a single Instance. - - Returns: - Callable[[~.GetInstanceRequest], - ~.Instance]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_instance" not in self._stubs: - self._stubs["get_instance"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1beta2.CloudMemcache/GetInstance", - request_serializer=cloud_memcache.GetInstanceRequest.serialize, - response_deserializer=cloud_memcache.Instance.deserialize, - ) - return self._stubs["get_instance"] - - @property - def create_instance( - self, - ) -> Callable[[cloud_memcache.CreateInstanceRequest], operations_pb2.Operation]: - r"""Return a callable for the create instance method over gRPC. - - Creates a new Instance in a given location. - - Returns: - Callable[[~.CreateInstanceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "create_instance" not in self._stubs: - self._stubs["create_instance"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1beta2.CloudMemcache/CreateInstance", - request_serializer=cloud_memcache.CreateInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["create_instance"] - - @property - def update_instance( - self, - ) -> Callable[[cloud_memcache.UpdateInstanceRequest], operations_pb2.Operation]: - r"""Return a callable for the update instance method over gRPC. - - Updates an existing Instance in a given project and - location. - - Returns: - Callable[[~.UpdateInstanceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "update_instance" not in self._stubs: - self._stubs["update_instance"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1beta2.CloudMemcache/UpdateInstance", - request_serializer=cloud_memcache.UpdateInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["update_instance"] - - @property - def update_parameters( - self, - ) -> Callable[[cloud_memcache.UpdateParametersRequest], operations_pb2.Operation]: - r"""Return a callable for the update parameters method over gRPC. - - Updates the defined Memcached parameters for an existing - instance. This method only stages the parameters, it must be - followed by ``ApplyParameters`` to apply the parameters to nodes - of the Memcached instance. 
- - Returns: - Callable[[~.UpdateParametersRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "update_parameters" not in self._stubs: - self._stubs["update_parameters"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1beta2.CloudMemcache/UpdateParameters", - request_serializer=cloud_memcache.UpdateParametersRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["update_parameters"] - - @property - def delete_instance( - self, - ) -> Callable[[cloud_memcache.DeleteInstanceRequest], operations_pb2.Operation]: - r"""Return a callable for the delete instance method over gRPC. - - Deletes a single Instance. - - Returns: - Callable[[~.DeleteInstanceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_instance" not in self._stubs: - self._stubs["delete_instance"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1beta2.CloudMemcache/DeleteInstance", - request_serializer=cloud_memcache.DeleteInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["delete_instance"] - - @property - def apply_parameters( - self, - ) -> Callable[[cloud_memcache.ApplyParametersRequest], operations_pb2.Operation]: - r"""Return a callable for the apply parameters method over gRPC. - - ``ApplyParameters`` restarts the set of specified nodes in order - to update them to the current set of parameters for the - Memcached Instance. 
- - Returns: - Callable[[~.ApplyParametersRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "apply_parameters" not in self._stubs: - self._stubs["apply_parameters"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1beta2.CloudMemcache/ApplyParameters", - request_serializer=cloud_memcache.ApplyParametersRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["apply_parameters"] - - @property - def apply_software_update( - self, - ) -> Callable[ - [cloud_memcache.ApplySoftwareUpdateRequest], operations_pb2.Operation - ]: - r"""Return a callable for the apply software update method over gRPC. - - Updates software on the selected nodes of the - Instance. - - Returns: - Callable[[~.ApplySoftwareUpdateRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "apply_software_update" not in self._stubs: - self._stubs["apply_software_update"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1beta2.CloudMemcache/ApplySoftwareUpdate", - request_serializer=cloud_memcache.ApplySoftwareUpdateRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["apply_software_update"] - - @property - def reschedule_maintenance( - self, - ) -> Callable[ - [cloud_memcache.RescheduleMaintenanceRequest], operations_pb2.Operation - ]: - r"""Return a callable for the reschedule maintenance method over gRPC. - - Performs the apply phase of the RescheduleMaintenance - verb. 
- - Returns: - Callable[[~.RescheduleMaintenanceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "reschedule_maintenance" not in self._stubs: - self._stubs["reschedule_maintenance"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1beta2.CloudMemcache/RescheduleMaintenance", - request_serializer=cloud_memcache.RescheduleMaintenanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["reschedule_maintenance"] - - def close(self): - self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse - ]: - r"""Return a callable for the list_operations method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse - ]: - r"""Return a callable for the list locations method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ("CloudMemcacheGrpcTransport",) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py deleted file mode 100644 index 68085b2..0000000 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py +++ /dev/null @@ -1,643 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -import warnings - -from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.memcache_v1beta2.types import cloud_memcache - -from .base import DEFAULT_CLIENT_INFO, CloudMemcacheTransport -from .grpc import CloudMemcacheGrpcTransport - - -class CloudMemcacheGrpcAsyncIOTransport(CloudMemcacheTransport): - """gRPC AsyncIO backend transport for CloudMemcache. - - Configures and manages Cloud Memorystore for Memcached instances. - - The ``memcache.googleapis.com`` service implements the Google Cloud - Memorystore for Memcached API and defines the following resource - model for managing Memorystore Memcached (also called Memcached - below) instances: - - - The service works with a collection of cloud projects, named: - ``/projects/*`` - - Each project has a collection of available locations, named: - ``/locations/*`` - - Each location has a collection of Memcached instances, named: - ``/instances/*`` - - As such, Memcached instances are resources of the form: - ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - - Note that location_id must be a GCP ``region``; for example: - - - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached`` - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
- """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel( - cls, - host: str = "memcache.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs, - ) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. 
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs, - ) - - def __init__( - self, - *, - host: str = "memcache.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
- If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. 
- """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. 
- credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_instances( - self, - ) -> Callable[ - [cloud_memcache.ListInstancesRequest], - Awaitable[cloud_memcache.ListInstancesResponse], - ]: - r"""Return a callable for the list instances method over gRPC. - - Lists Instances in a given location. - - Returns: - Callable[[~.ListInstancesRequest], - Awaitable[~.ListInstancesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_instances" not in self._stubs: - self._stubs["list_instances"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1beta2.CloudMemcache/ListInstances", - request_serializer=cloud_memcache.ListInstancesRequest.serialize, - response_deserializer=cloud_memcache.ListInstancesResponse.deserialize, - ) - return self._stubs["list_instances"] - - @property - def get_instance( - self, - ) -> Callable[ - [cloud_memcache.GetInstanceRequest], Awaitable[cloud_memcache.Instance] - ]: - r"""Return a callable for the get instance method over gRPC. - - Gets details of a single Instance. - - Returns: - Callable[[~.GetInstanceRequest], - Awaitable[~.Instance]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_instance" not in self._stubs: - self._stubs["get_instance"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1beta2.CloudMemcache/GetInstance", - request_serializer=cloud_memcache.GetInstanceRequest.serialize, - response_deserializer=cloud_memcache.Instance.deserialize, - ) - return self._stubs["get_instance"] - - @property - def create_instance( - self, - ) -> Callable[ - [cloud_memcache.CreateInstanceRequest], Awaitable[operations_pb2.Operation] - ]: - r"""Return a callable for the create instance method over gRPC. - - Creates a new Instance in a given location. - - Returns: - Callable[[~.CreateInstanceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "create_instance" not in self._stubs: - self._stubs["create_instance"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1beta2.CloudMemcache/CreateInstance", - request_serializer=cloud_memcache.CreateInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["create_instance"] - - @property - def update_instance( - self, - ) -> Callable[ - [cloud_memcache.UpdateInstanceRequest], Awaitable[operations_pb2.Operation] - ]: - r"""Return a callable for the update instance method over gRPC. - - Updates an existing Instance in a given project and - location. - - Returns: - Callable[[~.UpdateInstanceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "update_instance" not in self._stubs: - self._stubs["update_instance"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1beta2.CloudMemcache/UpdateInstance", - request_serializer=cloud_memcache.UpdateInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["update_instance"] - - @property - def update_parameters( - self, - ) -> Callable[ - [cloud_memcache.UpdateParametersRequest], Awaitable[operations_pb2.Operation] - ]: - r"""Return a callable for the update parameters method over gRPC. - - Updates the defined Memcached parameters for an existing - instance. This method only stages the parameters, it must be - followed by ``ApplyParameters`` to apply the parameters to nodes - of the Memcached instance. - - Returns: - Callable[[~.UpdateParametersRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "update_parameters" not in self._stubs: - self._stubs["update_parameters"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1beta2.CloudMemcache/UpdateParameters", - request_serializer=cloud_memcache.UpdateParametersRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["update_parameters"] - - @property - def delete_instance( - self, - ) -> Callable[ - [cloud_memcache.DeleteInstanceRequest], Awaitable[operations_pb2.Operation] - ]: - r"""Return a callable for the delete instance method over gRPC. - - Deletes a single Instance. - - Returns: - Callable[[~.DeleteInstanceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_instance" not in self._stubs: - self._stubs["delete_instance"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1beta2.CloudMemcache/DeleteInstance", - request_serializer=cloud_memcache.DeleteInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["delete_instance"] - - @property - def apply_parameters( - self, - ) -> Callable[ - [cloud_memcache.ApplyParametersRequest], Awaitable[operations_pb2.Operation] - ]: - r"""Return a callable for the apply parameters method over gRPC. - - ``ApplyParameters`` restarts the set of specified nodes in order - to update them to the current set of parameters for the - Memcached Instance. 
- - Returns: - Callable[[~.ApplyParametersRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "apply_parameters" not in self._stubs: - self._stubs["apply_parameters"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1beta2.CloudMemcache/ApplyParameters", - request_serializer=cloud_memcache.ApplyParametersRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["apply_parameters"] - - @property - def apply_software_update( - self, - ) -> Callable[ - [cloud_memcache.ApplySoftwareUpdateRequest], Awaitable[operations_pb2.Operation] - ]: - r"""Return a callable for the apply software update method over gRPC. - - Updates software on the selected nodes of the - Instance. - - Returns: - Callable[[~.ApplySoftwareUpdateRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "apply_software_update" not in self._stubs: - self._stubs["apply_software_update"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1beta2.CloudMemcache/ApplySoftwareUpdate", - request_serializer=cloud_memcache.ApplySoftwareUpdateRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["apply_software_update"] - - @property - def reschedule_maintenance( - self, - ) -> Callable[ - [cloud_memcache.RescheduleMaintenanceRequest], - Awaitable[operations_pb2.Operation], - ]: - r"""Return a callable for the reschedule maintenance method over gRPC. 
- - Performs the apply phase of the RescheduleMaintenance - verb. - - Returns: - Callable[[~.RescheduleMaintenanceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "reschedule_maintenance" not in self._stubs: - self._stubs["reschedule_maintenance"] = self.grpc_channel.unary_unary( - "/google.cloud.memcache.v1beta2.CloudMemcache/RescheduleMaintenance", - request_serializer=cloud_memcache.RescheduleMaintenanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["reschedule_maintenance"] - - def close(self): - return self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse - ]: - r"""Return a callable for the list_operations method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse - ]: - r"""Return a callable for the list locations method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - -__all__ = ("CloudMemcacheGrpcAsyncIOTransport",) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/rest.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/rest.py deleted file mode 100644 index fc45c4d..0000000 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/rest.py +++ /dev/null @@ -1,2005 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -import dataclasses -import json # type: ignore -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -from google.api_core import ( - gapic_v1, - operations_v1, - path_template, - rest_helpers, - rest_streaming, -) -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.protobuf import json_format -import grpc # type: ignore -from requests import __version__ as requests_version - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - - -from google.longrunning import operations_pb2 # type: ignore - -from google.cloud.memcache_v1beta2.types import cloud_memcache - -from .base import CloudMemcacheTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class CloudMemcacheRestInterceptor: - """Interceptor for CloudMemcache. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the CloudMemcacheRestTransport. - - .. 
code-block:: python - class MyCustomCloudMemcacheInterceptor(CloudMemcacheRestInterceptor): - def pre_apply_parameters(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_apply_parameters(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_apply_software_update(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_apply_software_update(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_instance(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_instance(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_instance(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_instance(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_instance(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_instance(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_instances(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_instances(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_reschedule_maintenance(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_reschedule_maintenance(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_instance(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_instance(self, response): - logging.log(f"Received response: 
{response}") - return response - - def pre_update_parameters(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_parameters(self, response): - logging.log(f"Received response: {response}") - return response - - transport = CloudMemcacheRestTransport(interceptor=MyCustomCloudMemcacheInterceptor()) - client = CloudMemcacheClient(transport=transport) - - - """ - - def pre_apply_parameters( - self, - request: cloud_memcache.ApplyParametersRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[cloud_memcache.ApplyParametersRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for apply_parameters - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudMemcache server. - """ - return request, metadata - - def post_apply_parameters( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for apply_parameters - - Override in a subclass to manipulate the response - after it is returned by the CloudMemcache server but before - it is returned to user code. - """ - return response - - def pre_apply_software_update( - self, - request: cloud_memcache.ApplySoftwareUpdateRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[cloud_memcache.ApplySoftwareUpdateRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for apply_software_update - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudMemcache server. - """ - return request, metadata - - def post_apply_software_update( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for apply_software_update - - Override in a subclass to manipulate the response - after it is returned by the CloudMemcache server but before - it is returned to user code. 
- """ - return response - - def pre_create_instance( - self, - request: cloud_memcache.CreateInstanceRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[cloud_memcache.CreateInstanceRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_instance - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudMemcache server. - """ - return request, metadata - - def post_create_instance( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for create_instance - - Override in a subclass to manipulate the response - after it is returned by the CloudMemcache server but before - it is returned to user code. - """ - return response - - def pre_delete_instance( - self, - request: cloud_memcache.DeleteInstanceRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[cloud_memcache.DeleteInstanceRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_instance - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudMemcache server. - """ - return request, metadata - - def post_delete_instance( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_instance - - Override in a subclass to manipulate the response - after it is returned by the CloudMemcache server but before - it is returned to user code. - """ - return response - - def pre_get_instance( - self, - request: cloud_memcache.GetInstanceRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[cloud_memcache.GetInstanceRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_instance - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudMemcache server. 
- """ - return request, metadata - - def post_get_instance( - self, response: cloud_memcache.Instance - ) -> cloud_memcache.Instance: - """Post-rpc interceptor for get_instance - - Override in a subclass to manipulate the response - after it is returned by the CloudMemcache server but before - it is returned to user code. - """ - return response - - def pre_list_instances( - self, - request: cloud_memcache.ListInstancesRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[cloud_memcache.ListInstancesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_instances - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudMemcache server. - """ - return request, metadata - - def post_list_instances( - self, response: cloud_memcache.ListInstancesResponse - ) -> cloud_memcache.ListInstancesResponse: - """Post-rpc interceptor for list_instances - - Override in a subclass to manipulate the response - after it is returned by the CloudMemcache server but before - it is returned to user code. - """ - return response - - def pre_reschedule_maintenance( - self, - request: cloud_memcache.RescheduleMaintenanceRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[cloud_memcache.RescheduleMaintenanceRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for reschedule_maintenance - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudMemcache server. - """ - return request, metadata - - def post_reschedule_maintenance( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for reschedule_maintenance - - Override in a subclass to manipulate the response - after it is returned by the CloudMemcache server but before - it is returned to user code. 
- """ - return response - - def pre_update_instance( - self, - request: cloud_memcache.UpdateInstanceRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[cloud_memcache.UpdateInstanceRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_instance - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudMemcache server. - """ - return request, metadata - - def post_update_instance( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for update_instance - - Override in a subclass to manipulate the response - after it is returned by the CloudMemcache server but before - it is returned to user code. - """ - return response - - def pre_update_parameters( - self, - request: cloud_memcache.UpdateParametersRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[cloud_memcache.UpdateParametersRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_parameters - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudMemcache server. - """ - return request, metadata - - def post_update_parameters( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for update_parameters - - Override in a subclass to manipulate the response - after it is returned by the CloudMemcache server but before - it is returned to user code. - """ - return response - - def pre_get_location( - self, - request: locations_pb2.GetLocationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_location - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudMemcache server. 
- """ - return request, metadata - - def post_get_location( - self, response: locations_pb2.Location - ) -> locations_pb2.Location: - """Post-rpc interceptor for get_location - - Override in a subclass to manipulate the response - after it is returned by the CloudMemcache server but before - it is returned to user code. - """ - return response - - def pre_list_locations( - self, - request: locations_pb2.ListLocationsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_locations - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudMemcache server. - """ - return request, metadata - - def post_list_locations( - self, response: locations_pb2.ListLocationsResponse - ) -> locations_pb2.ListLocationsResponse: - """Post-rpc interceptor for list_locations - - Override in a subclass to manipulate the response - after it is returned by the CloudMemcache server but before - it is returned to user code. - """ - return response - - def pre_cancel_operation( - self, - request: operations_pb2.CancelOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudMemcache server. - """ - return request, metadata - - def post_cancel_operation(self, response: None) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the CloudMemcache server but before - it is returned to user code. 
- """ - return response - - def pre_delete_operation( - self, - request: operations_pb2.DeleteOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudMemcache server. - """ - return request, metadata - - def post_delete_operation(self, response: None) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the CloudMemcache server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, - request: operations_pb2.GetOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudMemcache server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the CloudMemcache server but before - it is returned to user code. - """ - return response - - def pre_list_operations( - self, - request: operations_pb2.ListOperationsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudMemcache server. 
- """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the CloudMemcache server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class CloudMemcacheRestStub: - _session: AuthorizedSession - _host: str - _interceptor: CloudMemcacheRestInterceptor - - -class CloudMemcacheRestTransport(CloudMemcacheTransport): - """REST backend transport for CloudMemcache. - - Configures and manages Cloud Memorystore for Memcached instances. - - The ``memcache.googleapis.com`` service implements the Google Cloud - Memorystore for Memcached API and defines the following resource - model for managing Memorystore Memcached (also called Memcached - below) instances: - - - The service works with a collection of cloud projects, named: - ``/projects/*`` - - Each project has a collection of available locations, named: - ``/locations/*`` - - Each location has a collection of Memcached instances, named: - ``/instances/*`` - - As such, Memcached instances are resources of the form: - ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - - Note that location_id must be a GCP ``region``; for example: - - - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached`` - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__( - self, - *, - host: str = "memcache.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = "https", - interceptor: Optional[CloudMemcacheRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. 
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST - ) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or CloudMemcacheRestInterceptor() - self._prep_wrapped_messages(client_info) - - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. 
- if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - "google.longrunning.Operations.CancelOperation": [ - { - "method": "post", - "uri": "/v1beta2/{name=projects/*/locations/*/operations/*}:cancel", - "body": "*", - }, - ], - "google.longrunning.Operations.DeleteOperation": [ - { - "method": "delete", - "uri": "/v1beta2/{name=projects/*/locations/*/operations/*}", - }, - ], - "google.longrunning.Operations.GetOperation": [ - { - "method": "get", - "uri": "/v1beta2/{name=projects/*/locations/*/operations/*}", - }, - ], - "google.longrunning.Operations.ListOperations": [ - { - "method": "get", - "uri": "/v1beta2/{name=projects/*/locations/*}/operations", - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1beta2", - ) - - self._operations_client = operations_v1.AbstractOperationsClient( - transport=rest_transport - ) - - # Return the client from cache. - return self._operations_client - - class _ApplyParameters(CloudMemcacheRestStub): - def __hash__(self): - return hash("ApplyParameters") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: cloud_memcache.ApplyParametersRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the apply parameters method over HTTP. - - Args: - request (~.cloud_memcache.ApplyParametersRequest): - The request object. Request for - [ApplyParameters][google.cloud.memcache.v1beta2.CloudMemcache.ApplyParameters]. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1beta2/{name=projects/*/locations/*/instances/*}:applyParameters", - "body": "*", - }, - ] - request, metadata = self._interceptor.pre_apply_parameters( - request, metadata - ) - pb_request = cloud_memcache.ApplyParametersRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_apply_parameters(resp) - return resp - - class _ApplySoftwareUpdate(CloudMemcacheRestStub): - def __hash__(self): - return hash("ApplySoftwareUpdate") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: cloud_memcache.ApplySoftwareUpdateRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the apply software update method over HTTP. - - Args: - request (~.cloud_memcache.ApplySoftwareUpdateRequest): - The request object. Request for - [ApplySoftwareUpdate][google.cloud.memcache.v1beta2.CloudMemcache.ApplySoftwareUpdate]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1beta2/{instance=projects/*/locations/*/instances/*}:applySoftwareUpdate", - "body": "*", - }, - ] - request, metadata = self._interceptor.pre_apply_software_update( - request, metadata - ) - pb_request = cloud_memcache.ApplySoftwareUpdateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_apply_software_update(resp) - return resp - - class _CreateInstance(CloudMemcacheRestStub): - def __hash__(self): - return hash("CreateInstance") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "instanceId": "", - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: cloud_memcache.CreateInstanceRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the create instance method over HTTP. - - Args: - request (~.cloud_memcache.CreateInstanceRequest): - The request object. Request for - [CreateInstance][google.cloud.memcache.v1beta2.CloudMemcache.CreateInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1beta2/{parent=projects/*/locations/*}/instances", - "body": "resource", - }, - ] - request, metadata = self._interceptor.pre_create_instance(request, metadata) - pb_request = cloud_memcache.CreateInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_instance(resp) - return resp - - class _DeleteInstance(CloudMemcacheRestStub): - def __hash__(self): - return hash("DeleteInstance") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: cloud_memcache.DeleteInstanceRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the delete instance method over HTTP. - - Args: - request (~.cloud_memcache.DeleteInstanceRequest): - The request object. Request for - [DeleteInstance][google.cloud.memcache.v1beta2.CloudMemcache.DeleteInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "delete", - "uri": "/v1beta2/{name=projects/*/locations/*/instances/*}", - }, - ] - request, metadata = self._interceptor.pre_delete_instance(request, metadata) - pb_request = cloud_memcache.DeleteInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_instance(resp) - return resp - - class _GetInstance(CloudMemcacheRestStub): - def __hash__(self): - return hash("GetInstance") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: cloud_memcache.GetInstanceRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloud_memcache.Instance: - r"""Call the get instance method over HTTP. - - Args: - request (~.cloud_memcache.GetInstanceRequest): - The request object. Request for - [GetInstance][google.cloud.memcache.v1beta2.CloudMemcache.GetInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - ~.cloud_memcache.Instance: - A Memorystore for Memcached instance - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1beta2/{name=projects/*/locations/*/instances/*}", - }, - ] - request, metadata = self._interceptor.pre_get_instance(request, metadata) - pb_request = cloud_memcache.GetInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = cloud_memcache.Instance() - pb_resp = cloud_memcache.Instance.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_instance(resp) - return resp - - class _ListInstances(CloudMemcacheRestStub): - def __hash__(self): - return hash("ListInstances") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: cloud_memcache.ListInstancesRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloud_memcache.ListInstancesResponse: - r"""Call the list instances method over HTTP. - - Args: - request (~.cloud_memcache.ListInstancesRequest): - The request object. Request for - [ListInstances][google.cloud.memcache.v1beta2.CloudMemcache.ListInstances]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.cloud_memcache.ListInstancesResponse: - Response for - [ListInstances][google.cloud.memcache.v1beta2.CloudMemcache.ListInstances]. 
- - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1beta2/{parent=projects/*/locations/*}/instances", - }, - ] - request, metadata = self._interceptor.pre_list_instances(request, metadata) - pb_request = cloud_memcache.ListInstancesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = cloud_memcache.ListInstancesResponse() - pb_resp = cloud_memcache.ListInstancesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_instances(resp) - return resp - - class _RescheduleMaintenance(CloudMemcacheRestStub): - def __hash__(self): - return hash("RescheduleMaintenance") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: cloud_memcache.RescheduleMaintenanceRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the reschedule maintenance method over HTTP. - - Args: - request (~.cloud_memcache.RescheduleMaintenanceRequest): - The request object. Request for - [RescheduleMaintenance][google.cloud.memcache.v1beta2.CloudMemcache.RescheduleMaintenance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1beta2/{instance=projects/*/locations/*/instances/*}:rescheduleMaintenance", - "body": "*", - }, - ] - request, metadata = self._interceptor.pre_reschedule_maintenance( - request, metadata - ) - pb_request = cloud_memcache.RescheduleMaintenanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_reschedule_maintenance(resp) - return resp - - class _UpdateInstance(CloudMemcacheRestStub): - def __hash__(self): - return hash("UpdateInstance") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask": {}, - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: cloud_memcache.UpdateInstanceRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the update instance method over HTTP. - - Args: - request (~.cloud_memcache.UpdateInstanceRequest): - The request object. Request for - [UpdateInstance][google.cloud.memcache.v1beta2.CloudMemcache.UpdateInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "patch", - "uri": "/v1beta2/{resource.name=projects/*/locations/*/instances/*}", - "body": "resource", - }, - ] - request, metadata = self._interceptor.pre_update_instance(request, metadata) - pb_request = cloud_memcache.UpdateInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_instance(resp) - return resp - - class _UpdateParameters(CloudMemcacheRestStub): - def __hash__(self): - return hash("UpdateParameters") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: cloud_memcache.UpdateParametersRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the update parameters method over HTTP. - - Args: - request (~.cloud_memcache.UpdateParametersRequest): - The request object. Request for - [UpdateParameters][google.cloud.memcache.v1beta2.CloudMemcache.UpdateParameters]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "patch", - "uri": "/v1beta2/{name=projects/*/locations/*/instances/*}:updateParameters", - "body": "*", - }, - ] - request, metadata = self._interceptor.pre_update_parameters( - request, metadata - ) - pb_request = cloud_memcache.UpdateParametersRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_parameters(resp) - return resp - - @property - def apply_parameters( - self, - ) -> Callable[[cloud_memcache.ApplyParametersRequest], operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ApplyParameters(self._session, self._host, self._interceptor) # type: ignore - - @property - def apply_software_update( - self, - ) -> Callable[ - [cloud_memcache.ApplySoftwareUpdateRequest], operations_pb2.Operation - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ApplySoftwareUpdate(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_instance( - self, - ) -> Callable[[cloud_memcache.CreateInstanceRequest], operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_instance( - self, - ) -> Callable[[cloud_memcache.DeleteInstanceRequest], operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_instance( - self, - ) -> Callable[[cloud_memcache.GetInstanceRequest], cloud_memcache.Instance]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_instances( - self, - ) -> Callable[ - [cloud_memcache.ListInstancesRequest], cloud_memcache.ListInstancesResponse - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore - - @property - def reschedule_maintenance( - self, - ) -> Callable[ - [cloud_memcache.RescheduleMaintenanceRequest], operations_pb2.Operation - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._RescheduleMaintenance(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_instance( - self, - ) -> Callable[[cloud_memcache.UpdateInstanceRequest], operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_parameters( - self, - ) -> Callable[[cloud_memcache.UpdateParametersRequest], operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateParameters(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - - class _GetLocation(CloudMemcacheRestStub): - def __call__( - self, - request: locations_pb2.GetLocationRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.Location: - - r"""Call the get location method over HTTP. - - Args: - request (locations_pb2.GetLocationRequest): - The request object for GetLocation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - locations_pb2.Location: Response from GetLocation method. - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1beta2/{name=projects/*/locations/*}", - }, - ] - - request, metadata = self._interceptor.pre_get_location(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = locations_pb2.Location() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_location(resp) - return resp - - @property - def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - - class _ListLocations(CloudMemcacheRestStub): - def __call__( - self, - request: locations_pb2.ListLocationsRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> locations_pb2.ListLocationsResponse: - - r"""Call the list locations method over HTTP. - - Args: - request (locations_pb2.ListLocationsRequest): - The request object for ListLocations method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - locations_pb2.ListLocationsResponse: Response from ListLocations method. - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1beta2/{name=projects/*}/locations", - }, - ] - - request, metadata = self._interceptor.pre_list_locations(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = locations_pb2.ListLocationsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_list_locations(resp) - return resp - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(CloudMemcacheRestStub): - def __call__( - self, - request: operations_pb2.CancelOperationRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - - r"""Call the cancel operation method over HTTP. 
- - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1beta2/{name=projects/*/locations/*/operations/*}:cancel", - "body": "*", - }, - ] - - request, metadata = self._interceptor.pre_cancel_operation( - request, metadata - ) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - body = json.loads(json.dumps(transcoded_request["body"])) - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(CloudMemcacheRestStub): - def __call__( - self, - request: operations_pb2.DeleteOperationRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "delete", - "uri": "/v1beta2/{name=projects/*/locations/*/operations/*}", - }, - ] - - request, metadata = self._interceptor.pre_delete_operation( - request, metadata - ) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(CloudMemcacheRestStub): - def __call__( - self, - request: operations_pb2.GetOperationRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.Operation: Response from GetOperation method. - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1beta2/{name=projects/*/locations/*/operations/*}", - }, - ] - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_operation(resp) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(CloudMemcacheRestStub): - def __call__( - self, - request: operations_pb2.ListOperationsRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. - - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. 
- """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1beta2/{name=projects/*/locations/*}/operations", - }, - ] - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_list_operations(resp) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__ = ("CloudMemcacheRestTransport",) diff --git a/google/cloud/memcache_v1beta2/types/__init__.py b/google/cloud/memcache_v1beta2/types/__init__.py deleted file mode 100644 index 5605887..0000000 --- a/google/cloud/memcache_v1beta2/types/__init__.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .cloud_memcache import ( - ApplyParametersRequest, - ApplySoftwareUpdateRequest, - CreateInstanceRequest, - DeleteInstanceRequest, - GetInstanceRequest, - Instance, - ListInstancesRequest, - ListInstancesResponse, - LocationMetadata, - MaintenancePolicy, - MaintenanceSchedule, - MemcacheParameters, - MemcacheVersion, - OperationMetadata, - RescheduleMaintenanceRequest, - UpdateInstanceRequest, - UpdateParametersRequest, - WeeklyMaintenanceWindow, - ZoneMetadata, -) - -__all__ = ( - "ApplyParametersRequest", - "ApplySoftwareUpdateRequest", - "CreateInstanceRequest", - "DeleteInstanceRequest", - "GetInstanceRequest", - "Instance", - "ListInstancesRequest", - "ListInstancesResponse", - "LocationMetadata", - "MaintenancePolicy", - "MaintenanceSchedule", - "MemcacheParameters", - "OperationMetadata", - "RescheduleMaintenanceRequest", - "UpdateInstanceRequest", - "UpdateParametersRequest", - "WeeklyMaintenanceWindow", - "ZoneMetadata", - "MemcacheVersion", -) diff --git a/google/cloud/memcache_v1beta2/types/cloud_memcache.py b/google/cloud/memcache_v1beta2/types/cloud_memcache.py deleted file mode 100644 index e786238..0000000 --- a/google/cloud/memcache_v1beta2/types/cloud_memcache.py +++ /dev/null @@ -1,973 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.type import dayofweek_pb2 # type: ignore -from google.type import timeofday_pb2 # type: ignore -import proto # type: ignore - -__protobuf__ = proto.module( - package="google.cloud.memcache.v1beta2", - manifest={ - "MemcacheVersion", - "Instance", - "MaintenancePolicy", - "WeeklyMaintenanceWindow", - "MaintenanceSchedule", - "ListInstancesRequest", - "ListInstancesResponse", - "GetInstanceRequest", - "CreateInstanceRequest", - "UpdateInstanceRequest", - "DeleteInstanceRequest", - "RescheduleMaintenanceRequest", - "ApplyParametersRequest", - "UpdateParametersRequest", - "ApplySoftwareUpdateRequest", - "MemcacheParameters", - "OperationMetadata", - "LocationMetadata", - "ZoneMetadata", - }, -) - - -class MemcacheVersion(proto.Enum): - r"""Memcached versions supported by our service. - - Values: - MEMCACHE_VERSION_UNSPECIFIED (0): - No description available. - MEMCACHE_1_5 (1): - Memcached 1.5 version. - """ - MEMCACHE_VERSION_UNSPECIFIED = 0 - MEMCACHE_1_5 = 1 - - -class Instance(proto.Message): - r"""A Memorystore for Memcached instance - - Attributes: - name (str): - Required. 
Unique name of the resource in this scope - including project and location using the form: - ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - - Note: Memcached instances are managed and addressed at the - regional level so ``location_id`` here refers to a Google - Cloud region; however, users may choose which zones - Memcached nodes should be provisioned in within an instance. - Refer to - [zones][google.cloud.memcache.v1beta2.Instance.zones] field - for more details. - display_name (str): - User provided name for the instance, which is - only used for display purposes. Cannot be more - than 80 characters. - labels (MutableMapping[str, str]): - Resource labels to represent user-provided - metadata. Refer to cloud documentation on labels - for more details. - https://cloud.google.com/compute/docs/labeling-resources - authorized_network (str): - The full name of the Google Compute Engine - `network `__ to which - the instance is connected. If left unspecified, the - ``default`` network will be used. - zones (MutableSequence[str]): - Zones in which Memcached nodes should be - provisioned. Memcached nodes will be equally - distributed across these zones. If not provided, - the service will by default create nodes in all - zones in the region for the instance. - node_count (int): - Required. Number of nodes in the Memcached - instance. - node_config (google.cloud.memcache_v1beta2.types.Instance.NodeConfig): - Required. Configuration for Memcached nodes. - memcache_version (google.cloud.memcache_v1beta2.types.MemcacheVersion): - The major version of Memcached software. If not provided, - latest supported version will be used. Currently the latest - supported major version is ``MEMCACHE_1_5``. The minor - version will be automatically determined by our system based - on the latest supported minor version. 
- parameters (google.cloud.memcache_v1beta2.types.MemcacheParameters): - User defined parameters to apply to the - memcached process on each node. - memcache_nodes (MutableSequence[google.cloud.memcache_v1beta2.types.Instance.Node]): - Output only. List of Memcached nodes. Refer to - [Node][google.cloud.memcache.v1beta2.Instance.Node] message - for more details. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the instance was - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the instance was - updated. - state (google.cloud.memcache_v1beta2.types.Instance.State): - Output only. The state of this Memcached - instance. - memcache_full_version (str): - Output only. The full version of memcached - server running on this instance. System - automatically determines the full memcached - version for an instance based on the input - MemcacheVersion. - The full version format will be - "memcached-1.5.16". - instance_messages (MutableSequence[google.cloud.memcache_v1beta2.types.Instance.InstanceMessage]): - List of messages that describe the current - state of the Memcached instance. - discovery_endpoint (str): - Output only. Endpoint for the Discovery API. - update_available (bool): - Output only. Returns true if there is an - update waiting to be applied - maintenance_policy (google.cloud.memcache_v1beta2.types.MaintenancePolicy): - The maintenance policy for the instance. If - not provided, the maintenance event will be - performed based on Memorystore internal rollout - schedule. - maintenance_schedule (google.cloud.memcache_v1beta2.types.MaintenanceSchedule): - Output only. Published maintenance schedule. - """ - - class State(proto.Enum): - r"""Different states of a Memcached instance. - - Values: - STATE_UNSPECIFIED (0): - State not set. - CREATING (1): - Memcached instance is being created. - READY (2): - Memcached instance has been created and ready - to be used. 
- UPDATING (3): - Memcached instance is updating configuration - such as maintenance policy and schedule. - DELETING (4): - Memcached instance is being deleted. - PERFORMING_MAINTENANCE (5): - Memcached instance is going through - maintenance, e.g. data plane rollout. - """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - READY = 2 - UPDATING = 3 - DELETING = 4 - PERFORMING_MAINTENANCE = 5 - - class NodeConfig(proto.Message): - r"""Configuration for a Memcached Node. - - Attributes: - cpu_count (int): - Required. Number of cpus per Memcached node. - memory_size_mb (int): - Required. Memory size in MiB for each - Memcached node. - """ - - cpu_count: int = proto.Field( - proto.INT32, - number=1, - ) - memory_size_mb: int = proto.Field( - proto.INT32, - number=2, - ) - - class Node(proto.Message): - r""" - - Attributes: - node_id (str): - Output only. Identifier of the Memcached - node. The node id does not include project or - location like the Memcached instance name. - zone (str): - Output only. Location (GCP Zone) for the - Memcached node. - state (google.cloud.memcache_v1beta2.types.Instance.Node.State): - Output only. Current state of the Memcached - node. - host (str): - Output only. Hostname or IP address of the - Memcached node used by the clients to connect to - the Memcached server on this node. - port (int): - Output only. The port number of the Memcached - server on this node. - parameters (google.cloud.memcache_v1beta2.types.MemcacheParameters): - User defined parameters currently applied to - the node. - update_available (bool): - Output only. Returns true if there is an - update waiting to be applied - """ - - class State(proto.Enum): - r"""Different states of a Memcached node. - - Values: - STATE_UNSPECIFIED (0): - Node state is not set. - CREATING (1): - Node is being created. - READY (2): - Node has been created and ready to be used. - DELETING (3): - Node is being deleted. - UPDATING (4): - Node is being updated. 
- """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - READY = 2 - DELETING = 3 - UPDATING = 4 - - node_id: str = proto.Field( - proto.STRING, - number=1, - ) - zone: str = proto.Field( - proto.STRING, - number=2, - ) - state: "Instance.Node.State" = proto.Field( - proto.ENUM, - number=3, - enum="Instance.Node.State", - ) - host: str = proto.Field( - proto.STRING, - number=4, - ) - port: int = proto.Field( - proto.INT32, - number=5, - ) - parameters: "MemcacheParameters" = proto.Field( - proto.MESSAGE, - number=6, - message="MemcacheParameters", - ) - update_available: bool = proto.Field( - proto.BOOL, - number=7, - ) - - class InstanceMessage(proto.Message): - r""" - - Attributes: - code (google.cloud.memcache_v1beta2.types.Instance.InstanceMessage.Code): - A code that correspond to one type of - user-facing message. - message (str): - Message on memcached instance which will be - exposed to users. - """ - - class Code(proto.Enum): - r""" - - Values: - CODE_UNSPECIFIED (0): - Message Code not set. - ZONE_DISTRIBUTION_UNBALANCED (1): - Memcached nodes are distributed unevenly. 
- """ - CODE_UNSPECIFIED = 0 - ZONE_DISTRIBUTION_UNBALANCED = 1 - - code: "Instance.InstanceMessage.Code" = proto.Field( - proto.ENUM, - number=1, - enum="Instance.InstanceMessage.Code", - ) - message: str = proto.Field( - proto.STRING, - number=2, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - authorized_network: str = proto.Field( - proto.STRING, - number=4, - ) - zones: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - node_count: int = proto.Field( - proto.INT32, - number=6, - ) - node_config: NodeConfig = proto.Field( - proto.MESSAGE, - number=7, - message=NodeConfig, - ) - memcache_version: "MemcacheVersion" = proto.Field( - proto.ENUM, - number=9, - enum="MemcacheVersion", - ) - parameters: "MemcacheParameters" = proto.Field( - proto.MESSAGE, - number=11, - message="MemcacheParameters", - ) - memcache_nodes: MutableSequence[Node] = proto.RepeatedField( - proto.MESSAGE, - number=12, - message=Node, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=13, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=14, - message=timestamp_pb2.Timestamp, - ) - state: State = proto.Field( - proto.ENUM, - number=15, - enum=State, - ) - memcache_full_version: str = proto.Field( - proto.STRING, - number=18, - ) - instance_messages: MutableSequence[InstanceMessage] = proto.RepeatedField( - proto.MESSAGE, - number=19, - message=InstanceMessage, - ) - discovery_endpoint: str = proto.Field( - proto.STRING, - number=20, - ) - update_available: bool = proto.Field( - proto.BOOL, - number=21, - ) - maintenance_policy: "MaintenancePolicy" = proto.Field( - proto.MESSAGE, - number=22, - message="MaintenancePolicy", - ) - maintenance_schedule: "MaintenanceSchedule" = 
proto.Field( - proto.MESSAGE, - number=23, - message="MaintenanceSchedule", - ) - - -class MaintenancePolicy(proto.Message): - r"""Maintenance policy per instance. - - Attributes: - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the policy was - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the policy was - updated. - description (str): - Description of what this policy is for. Create/Update - methods return INVALID_ARGUMENT if the length is greater - than 512. - weekly_maintenance_window (MutableSequence[google.cloud.memcache_v1beta2.types.WeeklyMaintenanceWindow]): - Required. Maintenance window that is applied to resources - covered by this policy. Minimum 1. For the current version, - the maximum number of weekly_maintenance_windows is expected - to be one. - """ - - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - weekly_maintenance_window: MutableSequence[ - "WeeklyMaintenanceWindow" - ] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message="WeeklyMaintenanceWindow", - ) - - -class WeeklyMaintenanceWindow(proto.Message): - r"""Time window specified for weekly operations. - - Attributes: - day (google.type.dayofweek_pb2.DayOfWeek): - Required. Allows to define schedule that runs - specified day of the week. - start_time (google.type.timeofday_pb2.TimeOfDay): - Required. Start time of the window in UTC. - duration (google.protobuf.duration_pb2.Duration): - Required. Duration of the time window. 
- """ - - day: dayofweek_pb2.DayOfWeek = proto.Field( - proto.ENUM, - number=1, - enum=dayofweek_pb2.DayOfWeek, - ) - start_time: timeofday_pb2.TimeOfDay = proto.Field( - proto.MESSAGE, - number=2, - message=timeofday_pb2.TimeOfDay, - ) - duration: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=3, - message=duration_pb2.Duration, - ) - - -class MaintenanceSchedule(proto.Message): - r"""Upcoming maintenance schedule. - - Attributes: - start_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The start time of any upcoming - scheduled maintenance for this instance. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The end time of any upcoming - scheduled maintenance for this instance. - schedule_deadline_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The deadline that the - maintenance schedule start time can not go - beyond, including reschedule. - """ - - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - schedule_deadline_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - -class ListInstancesRequest(proto.Message): - r"""Request for - [ListInstances][google.cloud.memcache.v1beta2.CloudMemcache.ListInstances]. - - Attributes: - parent (str): - Required. The resource name of the instance location using - the form: ``projects/{project_id}/locations/{location_id}`` - where ``location_id`` refers to a GCP region - page_size (int): - The maximum number of items to return. - - If not specified, a default value of 1000 will be used by - the service. 
Regardless of the ``page_size`` value, the - response may include a partial list and a caller should only - rely on response's - [``next_page_token``][google.cloud.memcache.v1beta2.ListInstancesResponse.next_page_token] - to determine if there are more instances left to be queried. - page_token (str): - The ``next_page_token`` value returned from a previous List - request, if any. - filter (str): - List filter. For example, exclude all Memcached instances - with name as my-instance by specifying - ``"name != my-instance"``. - order_by (str): - Sort results. Supported values are "name", - "name desc" or "" (unsorted). - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListInstancesResponse(proto.Message): - r"""Response for - [ListInstances][google.cloud.memcache.v1beta2.CloudMemcache.ListInstances]. - - Attributes: - resources (MutableSequence[google.cloud.memcache_v1beta2.types.Instance]): - A list of Memcached instances in the project in the - specified location, or across all locations. - - If the ``location_id`` in the parent field of the request is - "-", all regions available to the project are queried, and - the results aggregated. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - unreachable (MutableSequence[str]): - Locations that could not be reached. 
- """ - - @property - def raw_page(self): - return self - - resources: MutableSequence["Instance"] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="Instance", - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class GetInstanceRequest(proto.Message): - r"""Request for - [GetInstance][google.cloud.memcache.v1beta2.CloudMemcache.GetInstance]. - - Attributes: - name (str): - Required. Memcached instance resource name in the format: - ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - where ``location_id`` refers to a GCP region - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateInstanceRequest(proto.Message): - r"""Request for - [CreateInstance][google.cloud.memcache.v1beta2.CloudMemcache.CreateInstance]. - - Attributes: - parent (str): - Required. The resource name of the instance location using - the form: ``projects/{project_id}/locations/{location_id}`` - where ``location_id`` refers to a GCP region - instance_id (str): - Required. The logical name of the Memcached instance in the - user project with the following restrictions: - - - Must contain only lowercase letters, numbers, and - hyphens. - - Must start with a letter. - - Must be between 1-40 characters. - - Must end with a number or a letter. - - Must be unique within the user project / location. - - If any of the above are not met, the API raises an invalid - argument error. - resource (google.cloud.memcache_v1beta2.types.Instance): - Required. 
A Memcached [Instance] resource - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - instance_id: str = proto.Field( - proto.STRING, - number=2, - ) - resource: "Instance" = proto.Field( - proto.MESSAGE, - number=3, - message="Instance", - ) - - -class UpdateInstanceRequest(proto.Message): - r"""Request for - [UpdateInstance][google.cloud.memcache.v1beta2.CloudMemcache.UpdateInstance]. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - - - ``displayName`` - resource (google.cloud.memcache_v1beta2.types.Instance): - Required. A Memcached [Instance] resource. Only fields - specified in update_mask are updated. - """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - resource: "Instance" = proto.Field( - proto.MESSAGE, - number=2, - message="Instance", - ) - - -class DeleteInstanceRequest(proto.Message): - r"""Request for - [DeleteInstance][google.cloud.memcache.v1beta2.CloudMemcache.DeleteInstance]. - - Attributes: - name (str): - Required. Memcached instance resource name in the format: - ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - where ``location_id`` refers to a GCP region - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class RescheduleMaintenanceRequest(proto.Message): - r"""Request for - [RescheduleMaintenance][google.cloud.memcache.v1beta2.CloudMemcache.RescheduleMaintenance]. - - Attributes: - instance (str): - Required. Memcache instance resource name using the form: - ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - where ``location_id`` refers to a GCP region. - reschedule_type (google.cloud.memcache_v1beta2.types.RescheduleMaintenanceRequest.RescheduleType): - Required. If reschedule type is SPECIFIC_TIME, must set up - schedule_time as well. 
- schedule_time (google.protobuf.timestamp_pb2.Timestamp): - Timestamp when the maintenance shall be rescheduled to if - reschedule_type=SPECIFIC_TIME, in RFC 3339 format, for - example ``2012-11-15T16:19:00.094Z``. - """ - - class RescheduleType(proto.Enum): - r"""Reschedule options. - - Values: - RESCHEDULE_TYPE_UNSPECIFIED (0): - Not set. - IMMEDIATE (1): - If the user wants to schedule the maintenance - to happen now. - NEXT_AVAILABLE_WINDOW (2): - If the user wants to use the existing - maintenance policy to find the next available - window. - SPECIFIC_TIME (3): - If the user wants to reschedule the - maintenance to a specific time. - """ - RESCHEDULE_TYPE_UNSPECIFIED = 0 - IMMEDIATE = 1 - NEXT_AVAILABLE_WINDOW = 2 - SPECIFIC_TIME = 3 - - instance: str = proto.Field( - proto.STRING, - number=1, - ) - reschedule_type: RescheduleType = proto.Field( - proto.ENUM, - number=2, - enum=RescheduleType, - ) - schedule_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -class ApplyParametersRequest(proto.Message): - r"""Request for - [ApplyParameters][google.cloud.memcache.v1beta2.CloudMemcache.ApplyParameters]. - - Attributes: - name (str): - Required. Resource name of the Memcached - instance for which parameter group updates - should be applied. - node_ids (MutableSequence[str]): - Nodes to which the instance-level parameter - group is applied. - apply_all (bool): - Whether to apply instance-level parameter group to all - nodes. If set to true, users are restricted from specifying - individual nodes, and ``ApplyParameters`` updates all nodes - within the instance. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - node_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - apply_all: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class UpdateParametersRequest(proto.Message): - r"""Request for - [UpdateParameters][google.cloud.memcache.v1beta2.CloudMemcache.UpdateParameters]. - - Attributes: - name (str): - Required. Resource name of the Memcached - instance for which the parameters should be - updated. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - parameters (google.cloud.memcache_v1beta2.types.MemcacheParameters): - The parameters to apply to the instance. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - parameters: "MemcacheParameters" = proto.Field( - proto.MESSAGE, - number=3, - message="MemcacheParameters", - ) - - -class ApplySoftwareUpdateRequest(proto.Message): - r"""Request for - [ApplySoftwareUpdate][google.cloud.memcache.v1beta2.CloudMemcache.ApplySoftwareUpdate]. - - Attributes: - instance (str): - Required. Resource name of the Memcached - instance for which software update should be - applied. - node_ids (MutableSequence[str]): - Nodes to which we should apply the update to. - Note all the selected nodes are updated in - parallel. - apply_all (bool): - Whether to apply the update to all nodes. If - set to true, will explicitly restrict users from - specifying any nodes, and apply software update - to all nodes (where applicable) within the - instance. - """ - - instance: str = proto.Field( - proto.STRING, - number=1, - ) - node_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - apply_all: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class MemcacheParameters(proto.Message): - r""" - - Attributes: - id (str): - Output only. 
The unique ID associated with - this set of parameters. Users can use this id to - determine if the parameters associated with the - instance differ from the parameters associated - with the nodes. A discrepancy between parameter - ids can inform users that they may need to take - action to apply parameters on nodes. - params (MutableMapping[str, str]): - User defined set of parameters to use in the - memcached process. - """ - - id: str = proto.Field( - proto.STRING, - number=1, - ) - params: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - - -class OperationMetadata(proto.Message): - r"""Represents the metadata of a long-running operation. - - Attributes: - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time when the operation was - created. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time when the operation finished - running. - target (str): - Output only. Server-defined resource path for - the target of the operation. - verb (str): - Output only. Name of the verb executed by the - operation. - status_detail (str): - Output only. Human-readable status of the - operation, if any. - cancel_requested (bool): - Output only. Identifies whether the user has requested - cancellation of the operation. Operations that have - successfully been cancelled have [Operation.error][] value - with a [google.rpc.Status.code][google.rpc.Status.code] of - 1, corresponding to ``Code.CANCELLED``. - api_version (str): - Output only. API version used to start the - operation. 
- """ - - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - target: str = proto.Field( - proto.STRING, - number=3, - ) - verb: str = proto.Field( - proto.STRING, - number=4, - ) - status_detail: str = proto.Field( - proto.STRING, - number=5, - ) - cancel_requested: bool = proto.Field( - proto.BOOL, - number=6, - ) - api_version: str = proto.Field( - proto.STRING, - number=7, - ) - - -class LocationMetadata(proto.Message): - r"""Metadata for the given - [google.cloud.location.Location][google.cloud.location.Location]. - - Attributes: - available_zones (MutableMapping[str, google.cloud.memcache_v1beta2.types.ZoneMetadata]): - Output only. The set of available zones in the location. The - map is keyed by the lowercase ID of each zone, as defined by - GCE. These keys can be specified in the ``zones`` field when - creating a Memcached instance. - """ - - available_zones: MutableMapping[str, "ZoneMetadata"] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=1, - message="ZoneMetadata", - ) - - -class ZoneMetadata(proto.Message): - r""" """ - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/memcache-v1beta2-py.tar.gz b/memcache-v1beta2-py.tar.gz deleted file mode 100644 index e69de29..0000000 diff --git a/mypy.ini b/mypy.ini deleted file mode 100644 index 574c5ae..0000000 --- a/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/noxfile.py b/noxfile.py deleted file mode 100644 index 95e58c5..0000000 --- a/noxfile.py +++ /dev/null @@ -1,426 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! - -from __future__ import absolute_import - -import os -import pathlib -import re -import shutil -import warnings - -import nox - -BLACK_VERSION = "black==22.3.0" -ISORT_VERSION = "isort==5.10.1" -LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] - -DEFAULT_PYTHON_VERSION = "3.8" - -UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] -UNIT_TEST_STANDARD_DEPENDENCIES = [ - "mock", - "asyncmock", - "pytest", - "pytest-cov", - "pytest-asyncio", -] -UNIT_TEST_EXTERNAL_DEPENDENCIES = [] -UNIT_TEST_LOCAL_DEPENDENCIES = [] -UNIT_TEST_DEPENDENCIES = [] -UNIT_TEST_EXTRAS = [] -UNIT_TEST_EXTRAS_BY_PYTHON = {} - -SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] -SYSTEM_TEST_STANDARD_DEPENDENCIES = [ - "mock", - "pytest", - "google-cloud-testutils", -] -SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] -SYSTEM_TEST_LOCAL_DEPENDENCIES = [] -SYSTEM_TEST_DEPENDENCIES = [] -SYSTEM_TEST_EXTRAS = [] -SYSTEM_TEST_EXTRAS_BY_PYTHON = {} - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -# 'docfx' is excluded since it only needs to run in 'docs-presubmit' -nox.options.sessions = [ - "unit", - "system", - "cover", - "lint", - "lint_setup_py", - "blacken", - "docs", -] - -# Error if a python version is missing -nox.options.error_on_missing_interpreters = True - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *LINT_PATHS, - ) - session.run("flake8", "google", "tests") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *LINT_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def format(session): - """ - Run isort to sort imports. Then run black - to format code to uniform standard. - """ - session.install(BLACK_VERSION, ISORT_VERSION) - # Use the --fss option to sort imports using strict alphabetical order. - # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections - session.run( - "isort", - "--fss", - *LINT_PATHS, - ) - session.run( - "black", - *LINT_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") - - -def install_unittest_dependencies(session, *constraints): - standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES - session.install(*standard_deps, *constraints) - - if UNIT_TEST_EXTERNAL_DEPENDENCIES: - warnings.warn( - "'unit_test_external_dependencies' is deprecated. 
Instead, please " - "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", - DeprecationWarning, - ) - session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) - - if UNIT_TEST_LOCAL_DEPENDENCIES: - session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) - - if UNIT_TEST_EXTRAS_BY_PYTHON: - extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) - elif UNIT_TEST_EXTRAS: - extras = UNIT_TEST_EXTRAS - else: - extras = [] - - if extras: - session.install("-e", f".[{','.join(extras)}]", *constraints) - else: - session.install("-e", ".", *constraints) - - -def default(session): - # Install all test dependencies, then install this package in-place. - - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" - ) - install_unittest_dependencies(session, "-c", constraints_path) - - # Run py.test against the unit tests. - session.run( - "py.test", - "--quiet", - f"--junitxml=unit_{session.python}_sponge_log.xml", - "--cov=google", - "--cov=tests/unit", - "--cov-append", - "--cov-config=.coveragerc", - "--cov-report=", - "--cov-fail-under=0", - os.path.join("tests", "unit"), - *session.posargs, - ) - - -@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) -def unit(session): - """Run the unit test suite.""" - default(session) - - -def install_systemtest_dependencies(session, *constraints): - - # Use pre-release gRPC for system tests. - # Exclude version 1.52.0rc1 which has a known issue. 
- # See https://github.com/grpc/grpc/issues/32163 - session.install("--pre", "grpcio!=1.52.0rc1") - - session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: - session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_LOCAL_DEPENDENCIES: - session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_DEPENDENCIES: - session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_EXTRAS_BY_PYTHON: - extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) - elif SYSTEM_TEST_EXTRAS: - extras = SYSTEM_TEST_EXTRAS - else: - extras = [] - - if extras: - session.install("-e", f".[{','.join(extras)}]", *constraints) - else: - session.install("-e", ".", *constraints) - - -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -def system(session): - """Run the system test suite.""" - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" - ) - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - - # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. - if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": - session.skip("RUN_SYSTEM_TESTS is set to false, skipping") - # Install pyopenssl for mTLS testing. - if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": - session.install("pyopenssl") - - system_test_exists = os.path.exists(system_test_path) - system_test_folder_exists = os.path.exists(system_test_folder_path) - # Sanity check: only run tests if found. - if not system_test_exists and not system_test_folder_exists: - session.skip("System tests were not found") - - install_systemtest_dependencies(session, "-c", constraints_path) - - # Run py.test against the system tests. 
- if system_test_exists: - session.run( - "py.test", - "--quiet", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_path, - *session.posargs, - ) - if system_test_folder_exists: - session.run( - "py.test", - "--quiet", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_folder_path, - *session.posargs, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python="3.9") -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install( - "sphinx==4.0.1", - "alabaster", - "recommonmark", - ) - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python="3.9") -def docfx(session): - """Build the docfx yaml files for this library.""" - - session.install("-e", ".") - session.install( - "sphinx==4.0.1", - "alabaster", - "recommonmark", - "gcp-sphinx-docfx-yaml", - ) - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-T", # show full traceback on exception - "-N", # no colors - "-D", - ( - "extensions=sphinx.ext.autodoc," - "sphinx.ext.autosummary," - "docfx_yaml.extension," - "sphinx.ext.intersphinx," - "sphinx.ext.coverage," - "sphinx.ext.napoleon," - "sphinx.ext.todo," - "sphinx.ext.viewcode," - "recommonmark" - ), - "-b", - "html", - "-d", - 
os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -def prerelease_deps(session): - """Run all tests with prerelease versions of dependencies installed.""" - - # Install all dependencies - session.install("-e", ".[all, tests, tracing]") - unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES - session.install(*unit_deps_all) - system_deps_all = ( - SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES - ) - session.install(*system_deps_all) - - # Because we test minimum dependency versions on the minimum Python - # version, the first version we test with in the unit tests sessions has a - # constraints file containing all dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "protobuf", - # dependency of grpc - "six", - "googleapis-common-protos", - # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 - "grpcio!=1.52.0rc1", - "grpcio-status", - "google-api-core", - "proto-plus", - "google-cloud-testutils", - # dependencies of google-cloud-testutils" - "click", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - "google-auth", - ] - session.install(*other_deps) - - # Print out prerelease package versions - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run("python", "-c", "import grpc; print(grpc.__version__)") - - session.run("py.test", "tests/unit") - - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - - # Only run system tests if found. - if os.path.exists(system_test_path): - session.run( - "py.test", - "--verbose", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_path, - *session.posargs, - ) - if os.path.exists(system_test_folder_path): - session.run( - "py.test", - "--verbose", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_folder_path, - *session.posargs, - ) diff --git a/owlbot.py b/owlbot.py deleted file mode 100644 index ce738f0..0000000 --- a/owlbot.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import json -from pathlib import Path -import shutil - -import synthtool as s -import synthtool.gcp as gcp -from synthtool.languages import python - -# ---------------------------------------------------------------------------- -# Copy the generated client from the owl-bot staging directory -# ---------------------------------------------------------------------------- - -clean_up_generated_samples = True - -# Load the default version defined in .repo-metadata.json. -default_version = json.load(open(".repo-metadata.json", "rt")).get( - "default_version" -) - -for library in s.get_staging_dirs(default_version): - if clean_up_generated_samples: - shutil.rmtree("samples/generated_samples", ignore_errors=True) - clean_up_generated_samples = False - s.move([library], excludes=["**/gapic_version.py"]) -s.remove_staging_dirs() - -# ---------------------------------------------------------------------------- -# Add templated files -# ---------------------------------------------------------------------------- - -templated_files = gcp.CommonTemplates().py_library( - cov_level=100, - microgenerator=True, - versions=gcp.common.detect_versions(path="./google", default_first=True), -) -s.move(templated_files, excludes=[".coveragerc", ".github/release-please.yml"]) - -python.py_samples(skip_readmes=True) - -# run format session for all directories which have a noxfile -for noxfile in Path(".").glob("**/noxfile.py"): - s.shell.run(["nox", "-s", "format"], cwd=noxfile.parent, hide_output=False) diff --git a/release-please-config.json b/release-please-config.json deleted file mode 100644 index e7faaa4..0000000 --- a/release-please-config.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "$schema": "https://raw.githubusercontent.com/googleapis/release-please/main/schemas/config.json", - "packages": { - ".": { - "release-type": "python", - "extra-files": [ - "google/cloud/memcache_v1beta2/gapic_version.py", - "google/cloud/memcache_v1/gapic_version.py", - 
"google/cloud/memcache/gapic_version.py", - { - "type": "json", - "path": "samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json", - "jsonpath": "$.clientLibrary.version" - }, - { - "type": "json", - "path": "samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json", - "jsonpath": "$.clientLibrary.version" - } - ] - } - }, - "release-type": "python", - "plugins": [ - { - "type": "sentence-case" - } - ], - "initial-version": "0.1.0" -} diff --git a/renovate.json b/renovate.json deleted file mode 100644 index 39b2a0e..0000000 --- a/renovate.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "extends": [ - "config:base", - "group:all", - ":preserveSemverRanges", - ":disableDependencyDashboard" - ], - "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], - "pip_requirements": { - "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] - } -} diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_async.py deleted file mode 100644 index 4d7ed2e..0000000 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for ApplyParameters -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1_generated_CloudMemcache_ApplyParameters_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1 - - -async def sample_apply_parameters(): - # Create a client - client = memcache_v1.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - request = memcache_v1.ApplyParametersRequest( - name="name_value", - ) - - # Make the request - operation = client.apply_parameters(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END memcache_v1_generated_CloudMemcache_ApplyParameters_async] diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_sync.py deleted file mode 100644 index 4856300..0000000 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_apply_parameters_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ApplyParameters -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1_generated_CloudMemcache_ApplyParameters_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1 - - -def sample_apply_parameters(): - # Create a client - client = memcache_v1.CloudMemcacheClient() - - # Initialize request argument(s) - request = memcache_v1.ApplyParametersRequest( - name="name_value", - ) - - # Make the request - operation = client.apply_parameters(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END memcache_v1_generated_CloudMemcache_ApplyParameters_sync] diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_async.py deleted file mode 100644 index 3e8e97d..0000000 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_async.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1_generated_CloudMemcache_CreateInstance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1 - - -async def sample_create_instance(): - # Create a client - client = memcache_v1.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - instance = memcache_v1.Instance() - instance.name = "name_value" - instance.node_count = 1070 - instance.node_config.cpu_count = 976 - instance.node_config.memory_size_mb = 1505 - - request = memcache_v1.CreateInstanceRequest( - parent="parent_value", - instance_id="instance_id_value", - instance=instance, - ) - - # Make the request - operation = client.create_instance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END memcache_v1_generated_CloudMemcache_CreateInstance_async] diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_sync.py deleted file mode 100644 index b3e3779..0000000 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_create_instance_sync.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1_generated_CloudMemcache_CreateInstance_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1 - - -def sample_create_instance(): - # Create a client - client = memcache_v1.CloudMemcacheClient() - - # Initialize request argument(s) - instance = memcache_v1.Instance() - instance.name = "name_value" - instance.node_count = 1070 - instance.node_config.cpu_count = 976 - instance.node_config.memory_size_mb = 1505 - - request = memcache_v1.CreateInstanceRequest( - parent="parent_value", - instance_id="instance_id_value", - instance=instance, - ) - - # Make the request - operation = client.create_instance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END memcache_v1_generated_CloudMemcache_CreateInstance_sync] diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_async.py deleted file mode 100644 index c74323e..0000000 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for DeleteInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1_generated_CloudMemcache_DeleteInstance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1 - - -async def sample_delete_instance(): - # Create a client - client = memcache_v1.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - request = memcache_v1.DeleteInstanceRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_instance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END memcache_v1_generated_CloudMemcache_DeleteInstance_async] diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_sync.py deleted file mode 100644 index 07a52fb..0000000 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_delete_instance_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1_generated_CloudMemcache_DeleteInstance_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1 - - -def sample_delete_instance(): - # Create a client - client = memcache_v1.CloudMemcacheClient() - - # Initialize request argument(s) - request = memcache_v1.DeleteInstanceRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_instance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END memcache_v1_generated_CloudMemcache_DeleteInstance_sync] diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_async.py deleted file mode 100644 index 83a14bb..0000000 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1_generated_CloudMemcache_GetInstance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1 - - -async def sample_get_instance(): - # Create a client - client = memcache_v1.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - request = memcache_v1.GetInstanceRequest( - name="name_value", - ) - - # Make the request - response = await client.get_instance(request=request) - - # Handle the response - print(response) - -# [END memcache_v1_generated_CloudMemcache_GetInstance_async] diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_sync.py deleted file mode 100644 index 968c860..0000000 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_get_instance_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# Generated code. DO NOT EDIT! -# -# Snippet for GetInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1_generated_CloudMemcache_GetInstance_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1 - - -def sample_get_instance(): - # Create a client - client = memcache_v1.CloudMemcacheClient() - - # Initialize request argument(s) - request = memcache_v1.GetInstanceRequest( - name="name_value", - ) - - # Make the request - response = client.get_instance(request=request) - - # Handle the response - print(response) - -# [END memcache_v1_generated_CloudMemcache_GetInstance_sync] diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_async.py deleted file mode 100644 index 0b03e65..0000000 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInstances -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1_generated_CloudMemcache_ListInstances_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1 - - -async def sample_list_instances(): - # Create a client - client = memcache_v1.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - request = memcache_v1.ListInstancesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instances(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END memcache_v1_generated_CloudMemcache_ListInstances_async] diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_sync.py deleted file mode 100644 index 33af66e..0000000 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_list_instances_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInstances -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1_generated_CloudMemcache_ListInstances_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1 - - -def sample_list_instances(): - # Create a client - client = memcache_v1.CloudMemcacheClient() - - # Initialize request argument(s) - request = memcache_v1.ListInstancesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instances(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END memcache_v1_generated_CloudMemcache_ListInstances_sync] diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_reschedule_maintenance_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_reschedule_maintenance_async.py deleted file mode 100644 index 11b8744..0000000 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_reschedule_maintenance_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RescheduleMaintenance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1_generated_CloudMemcache_RescheduleMaintenance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1 - - -async def sample_reschedule_maintenance(): - # Create a client - client = memcache_v1.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - request = memcache_v1.RescheduleMaintenanceRequest( - instance="instance_value", - reschedule_type="SPECIFIC_TIME", - ) - - # Make the request - operation = client.reschedule_maintenance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END memcache_v1_generated_CloudMemcache_RescheduleMaintenance_async] diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_reschedule_maintenance_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_reschedule_maintenance_sync.py deleted file mode 100644 index 821c80e..0000000 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_reschedule_maintenance_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the 
Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RescheduleMaintenance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1_generated_CloudMemcache_RescheduleMaintenance_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1 - - -def sample_reschedule_maintenance(): - # Create a client - client = memcache_v1.CloudMemcacheClient() - - # Initialize request argument(s) - request = memcache_v1.RescheduleMaintenanceRequest( - instance="instance_value", - reschedule_type="SPECIFIC_TIME", - ) - - # Make the request - operation = client.reschedule_maintenance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END memcache_v1_generated_CloudMemcache_RescheduleMaintenance_sync] diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_async.py deleted file mode 100644 index 257fc15..0000000 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_async.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1_generated_CloudMemcache_UpdateInstance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1 - - -async def sample_update_instance(): - # Create a client - client = memcache_v1.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - instance = memcache_v1.Instance() - instance.name = "name_value" - instance.node_count = 1070 - instance.node_config.cpu_count = 976 - instance.node_config.memory_size_mb = 1505 - - request = memcache_v1.UpdateInstanceRequest( - instance=instance, - ) - - # Make the request - operation = client.update_instance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END memcache_v1_generated_CloudMemcache_UpdateInstance_async] diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_sync.py deleted file mode 100644 index bfe1965..0000000 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_instance_sync.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1_generated_CloudMemcache_UpdateInstance_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1 - - -def sample_update_instance(): - # Create a client - client = memcache_v1.CloudMemcacheClient() - - # Initialize request argument(s) - instance = memcache_v1.Instance() - instance.name = "name_value" - instance.node_count = 1070 - instance.node_config.cpu_count = 976 - instance.node_config.memory_size_mb = 1505 - - request = memcache_v1.UpdateInstanceRequest( - instance=instance, - ) - - # Make the request - operation = client.update_instance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END memcache_v1_generated_CloudMemcache_UpdateInstance_sync] diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_async.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_async.py deleted file mode 100644 index 94949ec..0000000 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for UpdateParameters -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1_generated_CloudMemcache_UpdateParameters_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1 - - -async def sample_update_parameters(): - # Create a client - client = memcache_v1.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - request = memcache_v1.UpdateParametersRequest( - name="name_value", - ) - - # Make the request - operation = client.update_parameters(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END memcache_v1_generated_CloudMemcache_UpdateParameters_async] diff --git a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_sync.py b/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_sync.py deleted file mode 100644 index c78b667..0000000 --- a/samples/generated_samples/memcache_v1_generated_cloud_memcache_update_parameters_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateParameters -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1_generated_CloudMemcache_UpdateParameters_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1 - - -def sample_update_parameters(): - # Create a client - client = memcache_v1.CloudMemcacheClient() - - # Initialize request argument(s) - request = memcache_v1.UpdateParametersRequest( - name="name_value", - ) - - # Make the request - operation = client.update_parameters(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END memcache_v1_generated_CloudMemcache_UpdateParameters_sync] diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_async.py deleted file mode 100644 index e2f69e7..0000000 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ApplyParameters -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1beta2_generated_CloudMemcache_ApplyParameters_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1beta2 - - -async def sample_apply_parameters(): - # Create a client - client = memcache_v1beta2.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - request = memcache_v1beta2.ApplyParametersRequest( - name="name_value", - ) - - # Make the request - operation = client.apply_parameters(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END memcache_v1beta2_generated_CloudMemcache_ApplyParameters_async] diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_sync.py deleted file mode 100644 index 5e38aaf..0000000 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_parameters_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ApplyParameters -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1beta2_generated_CloudMemcache_ApplyParameters_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1beta2 - - -def sample_apply_parameters(): - # Create a client - client = memcache_v1beta2.CloudMemcacheClient() - - # Initialize request argument(s) - request = memcache_v1beta2.ApplyParametersRequest( - name="name_value", - ) - - # Make the request - operation = client.apply_parameters(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END memcache_v1beta2_generated_CloudMemcache_ApplyParameters_sync] diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_async.py deleted file mode 100644 index 23486a7..0000000 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ApplySoftwareUpdate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1beta2_generated_CloudMemcache_ApplySoftwareUpdate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1beta2 - - -async def sample_apply_software_update(): - # Create a client - client = memcache_v1beta2.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - request = memcache_v1beta2.ApplySoftwareUpdateRequest( - instance="instance_value", - ) - - # Make the request - operation = client.apply_software_update(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END memcache_v1beta2_generated_CloudMemcache_ApplySoftwareUpdate_async] diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_sync.py deleted file mode 100644 index 1a06e32..0000000 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_apply_software_update_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ApplySoftwareUpdate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1beta2_generated_CloudMemcache_ApplySoftwareUpdate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1beta2 - - -def sample_apply_software_update(): - # Create a client - client = memcache_v1beta2.CloudMemcacheClient() - - # Initialize request argument(s) - request = memcache_v1beta2.ApplySoftwareUpdateRequest( - instance="instance_value", - ) - - # Make the request - operation = client.apply_software_update(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END memcache_v1beta2_generated_CloudMemcache_ApplySoftwareUpdate_sync] diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_async.py deleted file mode 100644 index bba1842..0000000 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_async.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1beta2_generated_CloudMemcache_CreateInstance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1beta2 - - -async def sample_create_instance(): - # Create a client - client = memcache_v1beta2.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - resource = memcache_v1beta2.Instance() - resource.name = "name_value" - resource.node_count = 1070 - resource.node_config.cpu_count = 976 - resource.node_config.memory_size_mb = 1505 - - request = memcache_v1beta2.CreateInstanceRequest( - parent="parent_value", - instance_id="instance_id_value", - resource=resource, - ) - - # Make the request - operation = client.create_instance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END memcache_v1beta2_generated_CloudMemcache_CreateInstance_async] diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_sync.py deleted file mode 100644 index 68d4a3b..0000000 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_create_instance_sync.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1beta2_generated_CloudMemcache_CreateInstance_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1beta2 - - -def sample_create_instance(): - # Create a client - client = memcache_v1beta2.CloudMemcacheClient() - - # Initialize request argument(s) - resource = memcache_v1beta2.Instance() - resource.name = "name_value" - resource.node_count = 1070 - resource.node_config.cpu_count = 976 - resource.node_config.memory_size_mb = 1505 - - request = memcache_v1beta2.CreateInstanceRequest( - parent="parent_value", - instance_id="instance_id_value", - resource=resource, - ) - - # Make the request - operation = client.create_instance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END memcache_v1beta2_generated_CloudMemcache_CreateInstance_sync] diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_async.py deleted file mode 100644 index 90e9057..0000000 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for DeleteInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1beta2_generated_CloudMemcache_DeleteInstance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1beta2 - - -async def sample_delete_instance(): - # Create a client - client = memcache_v1beta2.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - request = memcache_v1beta2.DeleteInstanceRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_instance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END memcache_v1beta2_generated_CloudMemcache_DeleteInstance_async] diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_sync.py deleted file mode 100644 index af3c4b3..0000000 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_delete_instance_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1beta2_generated_CloudMemcache_DeleteInstance_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1beta2 - - -def sample_delete_instance(): - # Create a client - client = memcache_v1beta2.CloudMemcacheClient() - - # Initialize request argument(s) - request = memcache_v1beta2.DeleteInstanceRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_instance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END memcache_v1beta2_generated_CloudMemcache_DeleteInstance_sync] diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_async.py deleted file mode 100644 index 16fbc6d..0000000 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1beta2_generated_CloudMemcache_GetInstance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1beta2 - - -async def sample_get_instance(): - # Create a client - client = memcache_v1beta2.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - request = memcache_v1beta2.GetInstanceRequest( - name="name_value", - ) - - # Make the request - response = await client.get_instance(request=request) - - # Handle the response - print(response) - -# [END memcache_v1beta2_generated_CloudMemcache_GetInstance_async] diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_sync.py deleted file mode 100644 index b0ff048..0000000 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_get_instance_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1beta2_generated_CloudMemcache_GetInstance_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1beta2 - - -def sample_get_instance(): - # Create a client - client = memcache_v1beta2.CloudMemcacheClient() - - # Initialize request argument(s) - request = memcache_v1beta2.GetInstanceRequest( - name="name_value", - ) - - # Make the request - response = client.get_instance(request=request) - - # Handle the response - print(response) - -# [END memcache_v1beta2_generated_CloudMemcache_GetInstance_sync] diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_async.py deleted file mode 100644 index e2feb2d..0000000 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInstances -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1beta2_generated_CloudMemcache_ListInstances_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1beta2 - - -async def sample_list_instances(): - # Create a client - client = memcache_v1beta2.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - request = memcache_v1beta2.ListInstancesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instances(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END memcache_v1beta2_generated_CloudMemcache_ListInstances_async] diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_sync.py deleted file mode 100644 index 96eec1b..0000000 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_list_instances_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInstances -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1beta2_generated_CloudMemcache_ListInstances_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1beta2 - - -def sample_list_instances(): - # Create a client - client = memcache_v1beta2.CloudMemcacheClient() - - # Initialize request argument(s) - request = memcache_v1beta2.ListInstancesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instances(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END memcache_v1beta2_generated_CloudMemcache_ListInstances_sync] diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_async.py deleted file mode 100644 index c700a67..0000000 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RescheduleMaintenance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1beta2_generated_CloudMemcache_RescheduleMaintenance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1beta2 - - -async def sample_reschedule_maintenance(): - # Create a client - client = memcache_v1beta2.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - request = memcache_v1beta2.RescheduleMaintenanceRequest( - instance="instance_value", - reschedule_type="SPECIFIC_TIME", - ) - - # Make the request - operation = client.reschedule_maintenance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END memcache_v1beta2_generated_CloudMemcache_RescheduleMaintenance_async] diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_sync.py deleted file mode 100644 index 20b2f08..0000000 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 
Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RescheduleMaintenance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1beta2_generated_CloudMemcache_RescheduleMaintenance_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1beta2 - - -def sample_reschedule_maintenance(): - # Create a client - client = memcache_v1beta2.CloudMemcacheClient() - - # Initialize request argument(s) - request = memcache_v1beta2.RescheduleMaintenanceRequest( - instance="instance_value", - reschedule_type="SPECIFIC_TIME", - ) - - # Make the request - operation = client.reschedule_maintenance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END memcache_v1beta2_generated_CloudMemcache_RescheduleMaintenance_sync] diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_async.py deleted file mode 100644 index f401e3a..0000000 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_async.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1beta2_generated_CloudMemcache_UpdateInstance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1beta2 - - -async def sample_update_instance(): - # Create a client - client = memcache_v1beta2.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - resource = memcache_v1beta2.Instance() - resource.name = "name_value" - resource.node_count = 1070 - resource.node_config.cpu_count = 976 - resource.node_config.memory_size_mb = 1505 - - request = memcache_v1beta2.UpdateInstanceRequest( - resource=resource, - ) - - # Make the request - operation = client.update_instance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END memcache_v1beta2_generated_CloudMemcache_UpdateInstance_async] diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_sync.py deleted file mode 100644 index f1e71df..0000000 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_instance_sync.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1beta2_generated_CloudMemcache_UpdateInstance_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1beta2 - - -def sample_update_instance(): - # Create a client - client = memcache_v1beta2.CloudMemcacheClient() - - # Initialize request argument(s) - resource = memcache_v1beta2.Instance() - resource.name = "name_value" - resource.node_count = 1070 - resource.node_config.cpu_count = 976 - resource.node_config.memory_size_mb = 1505 - - request = memcache_v1beta2.UpdateInstanceRequest( - resource=resource, - ) - - # Make the request - operation = client.update_instance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END memcache_v1beta2_generated_CloudMemcache_UpdateInstance_sync] diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_async.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_async.py deleted file mode 100644 index 4606935..0000000 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for UpdateParameters -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1beta2_generated_CloudMemcache_UpdateParameters_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1beta2 - - -async def sample_update_parameters(): - # Create a client - client = memcache_v1beta2.CloudMemcacheAsyncClient() - - # Initialize request argument(s) - request = memcache_v1beta2.UpdateParametersRequest( - name="name_value", - ) - - # Make the request - operation = client.update_parameters(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END memcache_v1beta2_generated_CloudMemcache_UpdateParameters_async] diff --git a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_sync.py b/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_sync.py deleted file mode 100644 index 00013aa..0000000 --- a/samples/generated_samples/memcache_v1beta2_generated_cloud_memcache_update_parameters_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateParameters -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-memcache - - -# [START memcache_v1beta2_generated_CloudMemcache_UpdateParameters_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import memcache_v1beta2 - - -def sample_update_parameters(): - # Create a client - client = memcache_v1beta2.CloudMemcacheClient() - - # Initialize request argument(s) - request = memcache_v1beta2.UpdateParametersRequest( - name="name_value", - ) - - # Make the request - operation = client.update_parameters(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END memcache_v1beta2_generated_CloudMemcache_UpdateParameters_sync] diff --git a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json deleted file mode 100644 index 87d2277..0000000 --- a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json +++ /dev/null @@ -1,1375 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.memcache.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-memcache", - "version": "1.7.1" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient", - "shortName": "CloudMemcacheAsyncClient" - }, - "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient.apply_parameters", - "method": { - "fullName": "google.cloud.memcache.v1.CloudMemcache.ApplyParameters", - "service": { - "fullName": "google.cloud.memcache.v1.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "ApplyParameters" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1.types.ApplyParametersRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "node_ids", - "type": "MutableSequence[str]" - }, - { - "name": "apply_all", 
- "type": "bool" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "apply_parameters" - }, - "description": "Sample for ApplyParameters", - "file": "memcache_v1_generated_cloud_memcache_apply_parameters_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1_generated_CloudMemcache_ApplyParameters_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1_generated_cloud_memcache_apply_parameters_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.memcache_v1.CloudMemcacheClient", - "shortName": "CloudMemcacheClient" - }, - "fullName": "google.cloud.memcache_v1.CloudMemcacheClient.apply_parameters", - "method": { - "fullName": "google.cloud.memcache.v1.CloudMemcache.ApplyParameters", - "service": { - "fullName": "google.cloud.memcache.v1.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "ApplyParameters" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1.types.ApplyParametersRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "node_ids", - "type": "MutableSequence[str]" - }, - { - "name": "apply_all", - "type": "bool" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - 
"resultType": "google.api_core.operation.Operation", - "shortName": "apply_parameters" - }, - "description": "Sample for ApplyParameters", - "file": "memcache_v1_generated_cloud_memcache_apply_parameters_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1_generated_CloudMemcache_ApplyParameters_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1_generated_cloud_memcache_apply_parameters_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient", - "shortName": "CloudMemcacheAsyncClient" - }, - "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient.create_instance", - "method": { - "fullName": "google.cloud.memcache.v1.CloudMemcache.CreateInstance", - "service": { - "fullName": "google.cloud.memcache.v1.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "CreateInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1.types.CreateInstanceRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "instance", - "type": "google.cloud.memcache_v1.types.Instance" - }, - { - "name": "instance_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_instance" - }, - "description": "Sample for CreateInstance", - "file": 
"memcache_v1_generated_cloud_memcache_create_instance_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1_generated_CloudMemcache_CreateInstance_async", - "segments": [ - { - "end": 63, - "start": 27, - "type": "FULL" - }, - { - "end": 63, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 60, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 64, - "start": 61, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1_generated_cloud_memcache_create_instance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.memcache_v1.CloudMemcacheClient", - "shortName": "CloudMemcacheClient" - }, - "fullName": "google.cloud.memcache_v1.CloudMemcacheClient.create_instance", - "method": { - "fullName": "google.cloud.memcache.v1.CloudMemcache.CreateInstance", - "service": { - "fullName": "google.cloud.memcache.v1.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "CreateInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1.types.CreateInstanceRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "instance", - "type": "google.cloud.memcache_v1.types.Instance" - }, - { - "name": "instance_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_instance" - }, - "description": "Sample for CreateInstance", - "file": "memcache_v1_generated_cloud_memcache_create_instance_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1_generated_CloudMemcache_CreateInstance_sync", - "segments": [ - { - "end": 
63, - "start": 27, - "type": "FULL" - }, - { - "end": 63, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 60, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 64, - "start": 61, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1_generated_cloud_memcache_create_instance_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient", - "shortName": "CloudMemcacheAsyncClient" - }, - "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient.delete_instance", - "method": { - "fullName": "google.cloud.memcache.v1.CloudMemcache.DeleteInstance", - "service": { - "fullName": "google.cloud.memcache.v1.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "DeleteInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1.types.DeleteInstanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_instance" - }, - "description": "Sample for DeleteInstance", - "file": "memcache_v1_generated_cloud_memcache_delete_instance_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1_generated_CloudMemcache_DeleteInstance_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - 
"end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1_generated_cloud_memcache_delete_instance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.memcache_v1.CloudMemcacheClient", - "shortName": "CloudMemcacheClient" - }, - "fullName": "google.cloud.memcache_v1.CloudMemcacheClient.delete_instance", - "method": { - "fullName": "google.cloud.memcache.v1.CloudMemcache.DeleteInstance", - "service": { - "fullName": "google.cloud.memcache.v1.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "DeleteInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1.types.DeleteInstanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_instance" - }, - "description": "Sample for DeleteInstance", - "file": "memcache_v1_generated_cloud_memcache_delete_instance_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1_generated_CloudMemcache_DeleteInstance_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1_generated_cloud_memcache_delete_instance_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient", - "shortName": "CloudMemcacheAsyncClient" - }, - "fullName": 
"google.cloud.memcache_v1.CloudMemcacheAsyncClient.get_instance", - "method": { - "fullName": "google.cloud.memcache.v1.CloudMemcache.GetInstance", - "service": { - "fullName": "google.cloud.memcache.v1.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "GetInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1.types.GetInstanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.memcache_v1.types.Instance", - "shortName": "get_instance" - }, - "description": "Sample for GetInstance", - "file": "memcache_v1_generated_cloud_memcache_get_instance_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1_generated_CloudMemcache_GetInstance_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1_generated_cloud_memcache_get_instance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.memcache_v1.CloudMemcacheClient", - "shortName": "CloudMemcacheClient" - }, - "fullName": "google.cloud.memcache_v1.CloudMemcacheClient.get_instance", - "method": { - "fullName": "google.cloud.memcache.v1.CloudMemcache.GetInstance", - "service": { - "fullName": "google.cloud.memcache.v1.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "GetInstance" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.memcache_v1.types.GetInstanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.memcache_v1.types.Instance", - "shortName": "get_instance" - }, - "description": "Sample for GetInstance", - "file": "memcache_v1_generated_cloud_memcache_get_instance_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1_generated_CloudMemcache_GetInstance_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1_generated_cloud_memcache_get_instance_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient", - "shortName": "CloudMemcacheAsyncClient" - }, - "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient.list_instances", - "method": { - "fullName": "google.cloud.memcache.v1.CloudMemcache.ListInstances", - "service": { - "fullName": "google.cloud.memcache.v1.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "ListInstances" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1.types.ListInstancesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": 
"google.cloud.memcache_v1.services.cloud_memcache.pagers.ListInstancesAsyncPager", - "shortName": "list_instances" - }, - "description": "Sample for ListInstances", - "file": "memcache_v1_generated_cloud_memcache_list_instances_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1_generated_CloudMemcache_ListInstances_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1_generated_cloud_memcache_list_instances_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.memcache_v1.CloudMemcacheClient", - "shortName": "CloudMemcacheClient" - }, - "fullName": "google.cloud.memcache_v1.CloudMemcacheClient.list_instances", - "method": { - "fullName": "google.cloud.memcache.v1.CloudMemcache.ListInstances", - "service": { - "fullName": "google.cloud.memcache.v1.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "ListInstances" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1.types.ListInstancesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.memcache_v1.services.cloud_memcache.pagers.ListInstancesPager", - "shortName": "list_instances" - }, - "description": "Sample for ListInstances", - "file": "memcache_v1_generated_cloud_memcache_list_instances_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"memcache_v1_generated_CloudMemcache_ListInstances_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1_generated_cloud_memcache_list_instances_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient", - "shortName": "CloudMemcacheAsyncClient" - }, - "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient.reschedule_maintenance", - "method": { - "fullName": "google.cloud.memcache.v1.CloudMemcache.RescheduleMaintenance", - "service": { - "fullName": "google.cloud.memcache.v1.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "RescheduleMaintenance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1.types.RescheduleMaintenanceRequest" - }, - { - "name": "instance", - "type": "str" - }, - { - "name": "reschedule_type", - "type": "google.cloud.memcache_v1.types.RescheduleMaintenanceRequest.RescheduleType" - }, - { - "name": "schedule_time", - "type": "google.protobuf.timestamp_pb2.Timestamp" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "reschedule_maintenance" - }, - "description": "Sample for RescheduleMaintenance", - "file": "memcache_v1_generated_cloud_memcache_reschedule_maintenance_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"memcache_v1_generated_CloudMemcache_RescheduleMaintenance_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1_generated_cloud_memcache_reschedule_maintenance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.memcache_v1.CloudMemcacheClient", - "shortName": "CloudMemcacheClient" - }, - "fullName": "google.cloud.memcache_v1.CloudMemcacheClient.reschedule_maintenance", - "method": { - "fullName": "google.cloud.memcache.v1.CloudMemcache.RescheduleMaintenance", - "service": { - "fullName": "google.cloud.memcache.v1.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "RescheduleMaintenance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1.types.RescheduleMaintenanceRequest" - }, - { - "name": "instance", - "type": "str" - }, - { - "name": "reschedule_type", - "type": "google.cloud.memcache_v1.types.RescheduleMaintenanceRequest.RescheduleType" - }, - { - "name": "schedule_time", - "type": "google.protobuf.timestamp_pb2.Timestamp" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "reschedule_maintenance" - }, - "description": "Sample for RescheduleMaintenance", - "file": "memcache_v1_generated_cloud_memcache_reschedule_maintenance_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1_generated_CloudMemcache_RescheduleMaintenance_sync", - 
"segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1_generated_cloud_memcache_reschedule_maintenance_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient", - "shortName": "CloudMemcacheAsyncClient" - }, - "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient.update_instance", - "method": { - "fullName": "google.cloud.memcache.v1.CloudMemcache.UpdateInstance", - "service": { - "fullName": "google.cloud.memcache.v1.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "UpdateInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1.types.UpdateInstanceRequest" - }, - { - "name": "instance", - "type": "google.cloud.memcache_v1.types.Instance" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_instance" - }, - "description": "Sample for UpdateInstance", - "file": "memcache_v1_generated_cloud_memcache_update_instance_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1_generated_CloudMemcache_UpdateInstance_async", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": 
"CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1_generated_cloud_memcache_update_instance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.memcache_v1.CloudMemcacheClient", - "shortName": "CloudMemcacheClient" - }, - "fullName": "google.cloud.memcache_v1.CloudMemcacheClient.update_instance", - "method": { - "fullName": "google.cloud.memcache.v1.CloudMemcache.UpdateInstance", - "service": { - "fullName": "google.cloud.memcache.v1.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "UpdateInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1.types.UpdateInstanceRequest" - }, - { - "name": "instance", - "type": "google.cloud.memcache_v1.types.Instance" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_instance" - }, - "description": "Sample for UpdateInstance", - "file": "memcache_v1_generated_cloud_memcache_update_instance_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1_generated_CloudMemcache_UpdateInstance_sync", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - 
} - ], - "title": "memcache_v1_generated_cloud_memcache_update_instance_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient", - "shortName": "CloudMemcacheAsyncClient" - }, - "fullName": "google.cloud.memcache_v1.CloudMemcacheAsyncClient.update_parameters", - "method": { - "fullName": "google.cloud.memcache.v1.CloudMemcache.UpdateParameters", - "service": { - "fullName": "google.cloud.memcache.v1.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "UpdateParameters" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1.types.UpdateParametersRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "parameters", - "type": "google.cloud.memcache_v1.types.MemcacheParameters" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_parameters" - }, - "description": "Sample for UpdateParameters", - "file": "memcache_v1_generated_cloud_memcache_update_parameters_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1_generated_CloudMemcache_UpdateParameters_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1_generated_cloud_memcache_update_parameters_async.py" - }, - { - "canonical": true, - 
"clientMethod": { - "client": { - "fullName": "google.cloud.memcache_v1.CloudMemcacheClient", - "shortName": "CloudMemcacheClient" - }, - "fullName": "google.cloud.memcache_v1.CloudMemcacheClient.update_parameters", - "method": { - "fullName": "google.cloud.memcache.v1.CloudMemcache.UpdateParameters", - "service": { - "fullName": "google.cloud.memcache.v1.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "UpdateParameters" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1.types.UpdateParametersRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "parameters", - "type": "google.cloud.memcache_v1.types.MemcacheParameters" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_parameters" - }, - "description": "Sample for UpdateParameters", - "file": "memcache_v1_generated_cloud_memcache_update_parameters_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1_generated_CloudMemcache_UpdateParameters_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1_generated_cloud_memcache_update_parameters_sync.py" - } - ] -} diff --git a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json b/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json deleted 
file mode 100644 index 6152ae0..0000000 --- a/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json +++ /dev/null @@ -1,1552 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.memcache.v1beta2", - "version": "v1beta2" - } - ], - "language": "PYTHON", - "name": "google-cloud-memcache", - "version": "1.7.1" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient", - "shortName": "CloudMemcacheAsyncClient" - }, - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient.apply_parameters", - "method": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.ApplyParameters", - "service": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "ApplyParameters" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1beta2.types.ApplyParametersRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "node_ids", - "type": "MutableSequence[str]" - }, - { - "name": "apply_all", - "type": "bool" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "apply_parameters" - }, - "description": "Sample for ApplyParameters", - "file": "memcache_v1beta2_generated_cloud_memcache_apply_parameters_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1beta2_generated_CloudMemcache_ApplyParameters_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { 
- "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1beta2_generated_cloud_memcache_apply_parameters_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient", - "shortName": "CloudMemcacheClient" - }, - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient.apply_parameters", - "method": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.ApplyParameters", - "service": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "ApplyParameters" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1beta2.types.ApplyParametersRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "node_ids", - "type": "MutableSequence[str]" - }, - { - "name": "apply_all", - "type": "bool" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "apply_parameters" - }, - "description": "Sample for ApplyParameters", - "file": "memcache_v1beta2_generated_cloud_memcache_apply_parameters_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1beta2_generated_CloudMemcache_ApplyParameters_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"memcache_v1beta2_generated_cloud_memcache_apply_parameters_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient", - "shortName": "CloudMemcacheAsyncClient" - }, - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient.apply_software_update", - "method": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.ApplySoftwareUpdate", - "service": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "ApplySoftwareUpdate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1beta2.types.ApplySoftwareUpdateRequest" - }, - { - "name": "instance", - "type": "str" - }, - { - "name": "node_ids", - "type": "MutableSequence[str]" - }, - { - "name": "apply_all", - "type": "bool" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "apply_software_update" - }, - "description": "Sample for ApplySoftwareUpdate", - "file": "memcache_v1beta2_generated_cloud_memcache_apply_software_update_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1beta2_generated_CloudMemcache_ApplySoftwareUpdate_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1beta2_generated_cloud_memcache_apply_software_update_async.py" - }, - { - "canonical": true, - 
"clientMethod": { - "client": { - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient", - "shortName": "CloudMemcacheClient" - }, - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient.apply_software_update", - "method": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.ApplySoftwareUpdate", - "service": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "ApplySoftwareUpdate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1beta2.types.ApplySoftwareUpdateRequest" - }, - { - "name": "instance", - "type": "str" - }, - { - "name": "node_ids", - "type": "MutableSequence[str]" - }, - { - "name": "apply_all", - "type": "bool" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "apply_software_update" - }, - "description": "Sample for ApplySoftwareUpdate", - "file": "memcache_v1beta2_generated_cloud_memcache_apply_software_update_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1beta2_generated_CloudMemcache_ApplySoftwareUpdate_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1beta2_generated_cloud_memcache_apply_software_update_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient", - "shortName": 
"CloudMemcacheAsyncClient" - }, - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient.create_instance", - "method": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.CreateInstance", - "service": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "CreateInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1beta2.types.CreateInstanceRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "instance_id", - "type": "str" - }, - { - "name": "resource", - "type": "google.cloud.memcache_v1beta2.types.Instance" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_instance" - }, - "description": "Sample for CreateInstance", - "file": "memcache_v1beta2_generated_cloud_memcache_create_instance_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1beta2_generated_CloudMemcache_CreateInstance_async", - "segments": [ - { - "end": 63, - "start": 27, - "type": "FULL" - }, - { - "end": 63, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 60, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 64, - "start": 61, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1beta2_generated_cloud_memcache_create_instance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient", - "shortName": "CloudMemcacheClient" - }, - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient.create_instance", - "method": { - "fullName": 
"google.cloud.memcache.v1beta2.CloudMemcache.CreateInstance", - "service": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "CreateInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1beta2.types.CreateInstanceRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "instance_id", - "type": "str" - }, - { - "name": "resource", - "type": "google.cloud.memcache_v1beta2.types.Instance" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_instance" - }, - "description": "Sample for CreateInstance", - "file": "memcache_v1beta2_generated_cloud_memcache_create_instance_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1beta2_generated_CloudMemcache_CreateInstance_sync", - "segments": [ - { - "end": 63, - "start": 27, - "type": "FULL" - }, - { - "end": 63, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 60, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 64, - "start": 61, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1beta2_generated_cloud_memcache_create_instance_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient", - "shortName": "CloudMemcacheAsyncClient" - }, - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient.delete_instance", - "method": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.DeleteInstance", - "service": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", - 
"shortName": "CloudMemcache" - }, - "shortName": "DeleteInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1beta2.types.DeleteInstanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_instance" - }, - "description": "Sample for DeleteInstance", - "file": "memcache_v1beta2_generated_cloud_memcache_delete_instance_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1beta2_generated_CloudMemcache_DeleteInstance_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1beta2_generated_cloud_memcache_delete_instance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient", - "shortName": "CloudMemcacheClient" - }, - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient.delete_instance", - "method": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.DeleteInstance", - "service": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "DeleteInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1beta2.types.DeleteInstanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - 
"name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_instance" - }, - "description": "Sample for DeleteInstance", - "file": "memcache_v1beta2_generated_cloud_memcache_delete_instance_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1beta2_generated_CloudMemcache_DeleteInstance_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1beta2_generated_cloud_memcache_delete_instance_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient", - "shortName": "CloudMemcacheAsyncClient" - }, - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient.get_instance", - "method": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.GetInstance", - "service": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "GetInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1beta2.types.GetInstanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.memcache_v1beta2.types.Instance", - "shortName": "get_instance" - }, - "description": "Sample for GetInstance", - "file": 
"memcache_v1beta2_generated_cloud_memcache_get_instance_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1beta2_generated_CloudMemcache_GetInstance_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1beta2_generated_cloud_memcache_get_instance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient", - "shortName": "CloudMemcacheClient" - }, - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient.get_instance", - "method": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.GetInstance", - "service": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "GetInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1beta2.types.GetInstanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.memcache_v1beta2.types.Instance", - "shortName": "get_instance" - }, - "description": "Sample for GetInstance", - "file": "memcache_v1beta2_generated_cloud_memcache_get_instance_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1beta2_generated_CloudMemcache_GetInstance_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 
40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1beta2_generated_cloud_memcache_get_instance_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient", - "shortName": "CloudMemcacheAsyncClient" - }, - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient.list_instances", - "method": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.ListInstances", - "service": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "ListInstances" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1beta2.types.ListInstancesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.memcache_v1beta2.services.cloud_memcache.pagers.ListInstancesAsyncPager", - "shortName": "list_instances" - }, - "description": "Sample for ListInstances", - "file": "memcache_v1beta2_generated_cloud_memcache_list_instances_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1beta2_generated_CloudMemcache_ListInstances_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": 
"RESPONSE_HANDLING" - } - ], - "title": "memcache_v1beta2_generated_cloud_memcache_list_instances_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient", - "shortName": "CloudMemcacheClient" - }, - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient.list_instances", - "method": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.ListInstances", - "service": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "ListInstances" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1beta2.types.ListInstancesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.memcache_v1beta2.services.cloud_memcache.pagers.ListInstancesPager", - "shortName": "list_instances" - }, - "description": "Sample for ListInstances", - "file": "memcache_v1beta2_generated_cloud_memcache_list_instances_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1beta2_generated_CloudMemcache_ListInstances_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1beta2_generated_cloud_memcache_list_instances_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient", - "shortName": 
"CloudMemcacheAsyncClient" - }, - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient.reschedule_maintenance", - "method": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.RescheduleMaintenance", - "service": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "RescheduleMaintenance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1beta2.types.RescheduleMaintenanceRequest" - }, - { - "name": "instance", - "type": "str" - }, - { - "name": "reschedule_type", - "type": "google.cloud.memcache_v1beta2.types.RescheduleMaintenanceRequest.RescheduleType" - }, - { - "name": "schedule_time", - "type": "google.protobuf.timestamp_pb2.Timestamp" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "reschedule_maintenance" - }, - "description": "Sample for RescheduleMaintenance", - "file": "memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1beta2_generated_CloudMemcache_RescheduleMaintenance_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient", - "shortName": 
"CloudMemcacheClient" - }, - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient.reschedule_maintenance", - "method": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.RescheduleMaintenance", - "service": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "RescheduleMaintenance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1beta2.types.RescheduleMaintenanceRequest" - }, - { - "name": "instance", - "type": "str" - }, - { - "name": "reschedule_type", - "type": "google.cloud.memcache_v1beta2.types.RescheduleMaintenanceRequest.RescheduleType" - }, - { - "name": "schedule_time", - "type": "google.protobuf.timestamp_pb2.Timestamp" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "reschedule_maintenance" - }, - "description": "Sample for RescheduleMaintenance", - "file": "memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1beta2_generated_CloudMemcache_RescheduleMaintenance_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1beta2_generated_cloud_memcache_reschedule_maintenance_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient", - "shortName": 
"CloudMemcacheAsyncClient" - }, - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient.update_instance", - "method": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.UpdateInstance", - "service": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "UpdateInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1beta2.types.UpdateInstanceRequest" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "resource", - "type": "google.cloud.memcache_v1beta2.types.Instance" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_instance" - }, - "description": "Sample for UpdateInstance", - "file": "memcache_v1beta2_generated_cloud_memcache_update_instance_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1beta2_generated_CloudMemcache_UpdateInstance_async", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1beta2_generated_cloud_memcache_update_instance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient", - "shortName": "CloudMemcacheClient" - }, - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient.update_instance", - "method": { - "fullName": 
"google.cloud.memcache.v1beta2.CloudMemcache.UpdateInstance", - "service": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "UpdateInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1beta2.types.UpdateInstanceRequest" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "resource", - "type": "google.cloud.memcache_v1beta2.types.Instance" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_instance" - }, - "description": "Sample for UpdateInstance", - "file": "memcache_v1beta2_generated_cloud_memcache_update_instance_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1beta2_generated_CloudMemcache_UpdateInstance_sync", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1beta2_generated_cloud_memcache_update_instance_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient", - "shortName": "CloudMemcacheAsyncClient" - }, - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheAsyncClient.update_parameters", - "method": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.UpdateParameters", - "service": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", - 
"shortName": "CloudMemcache" - }, - "shortName": "UpdateParameters" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.memcache_v1beta2.types.UpdateParametersRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "parameters", - "type": "google.cloud.memcache_v1beta2.types.MemcacheParameters" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_parameters" - }, - "description": "Sample for UpdateParameters", - "file": "memcache_v1beta2_generated_cloud_memcache_update_parameters_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1beta2_generated_CloudMemcache_UpdateParameters_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1beta2_generated_cloud_memcache_update_parameters_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient", - "shortName": "CloudMemcacheClient" - }, - "fullName": "google.cloud.memcache_v1beta2.CloudMemcacheClient.update_parameters", - "method": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache.UpdateParameters", - "service": { - "fullName": "google.cloud.memcache.v1beta2.CloudMemcache", - "shortName": "CloudMemcache" - }, - "shortName": "UpdateParameters" - }, - "parameters": [ - { - 
"name": "request", - "type": "google.cloud.memcache_v1beta2.types.UpdateParametersRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "parameters", - "type": "google.cloud.memcache_v1beta2.types.MemcacheParameters" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_parameters" - }, - "description": "Sample for UpdateParameters", - "file": "memcache_v1beta2_generated_cloud_memcache_update_parameters_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "memcache_v1beta2_generated_CloudMemcache_UpdateParameters_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "memcache_v1beta2_generated_cloud_memcache_update_parameters_sync.py" - } - ] -} diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh deleted file mode 100755 index 21f6d2a..0000000 --- a/scripts/decrypt-secrets.sh +++ /dev/null @@ -1,46 +0,0 @@ -#!/bin/bash - -# Copyright 2015 Google Inc. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" -ROOT=$( dirname "$DIR" ) - -# Work from the project root. -cd $ROOT - -# Prevent it from overriding files. -# We recommend that sample authors use their own service account files and cloud project. -# In that case, they are supposed to prepare these files by themselves. -if [[ -f "testing/test-env.sh" ]] || \ - [[ -f "testing/service-account.json" ]] || \ - [[ -f "testing/client-secrets.json" ]]; then - echo "One or more target files exist, aborting." - exit 1 -fi - -# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. -PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" - -gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ - --project="${PROJECT_ID}" \ - > testing/test-env.sh -gcloud secrets versions access latest \ - --secret="python-docs-samples-service-account" \ - --project="${PROJECT_ID}" \ - > testing/service-account.json -gcloud secrets versions access latest \ - --secret="python-docs-samples-client-secrets" \ - --project="${PROJECT_ID}" \ - > testing/client-secrets.json diff --git a/scripts/fixup_keywords.py b/scripts/fixup_keywords.py deleted file mode 100644 index c2088a0..0000000 --- a/scripts/fixup_keywords.py +++ /dev/null @@ -1,183 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class memcacheCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'apply_parameters': ('name', 'node_ids', 'apply_all', ), - 'create_instance': ('parent', 'instance_id', 'resource', ), - 'delete_instance': ('name', ), - 'get_instance': ('name', ), - 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'update_instance': ('update_mask', 'resource', ), - 'update_parameters': ('name', 'update_mask', 'parameters', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. 
- args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: not a.keyword.value in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), - cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=memcacheCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. 
- with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the memcache client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/scripts/fixup_memcache_v1_keywords.py b/scripts/fixup_memcache_v1_keywords.py deleted file mode 100644 index de61ce0..0000000 --- a/scripts/fixup_memcache_v1_keywords.py +++ /dev/null @@ -1,183 +0,0 @@ -#! 
/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class memcacheCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'apply_parameters': ('name', 'node_ids', 'apply_all', ), - 'create_instance': ('parent', 'instance_id', 'instance', ), - 'delete_instance': ('name', ), - 'get_instance': ('name', ), - 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'reschedule_maintenance': ('instance', 'reschedule_type', 'schedule_time', ), - 'update_instance': ('update_mask', 'instance', ), - 'update_parameters': ('name', 'update_mask', 'parameters', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. 
- return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=memcacheCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. 
- updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the memcache client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/scripts/fixup_memcache_v1beta2_keywords.py b/scripts/fixup_memcache_v1beta2_keywords.py 
deleted file mode 100644 index b082016..0000000 --- a/scripts/fixup_memcache_v1beta2_keywords.py +++ /dev/null @@ -1,184 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class memcacheCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'apply_parameters': ('name', 'node_ids', 'apply_all', ), - 'apply_software_update': ('instance', 'node_ids', 'apply_all', ), - 'create_instance': ('parent', 'instance_id', 'resource', ), - 'delete_instance': ('name', ), - 'get_instance': ('name', ), - 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'reschedule_maintenance': ('instance', 'reschedule_type', 'schedule_time', ), - 'update_instance': ('update_mask', 'resource', ), - 'update_parameters': ('name', 'update_mask', 'parameters', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - 
try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=memcacheCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. 
- tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the memcache client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py deleted file mode 100644 index 91b5967..0000000 --- a/scripts/readme-gen/readme_gen.py +++ /dev/null @@ -1,69 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2016 Google Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Generates READMEs using configuration defined in yaml.""" - -import argparse -import io -import os -import subprocess - -import jinja2 -import yaml - - -jinja_env = jinja2.Environment( - trim_blocks=True, - loader=jinja2.FileSystemLoader( - os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) - ), - autoescape=True, -) - -README_TMPL = jinja_env.get_template('README.tmpl.rst') - - -def get_help(file): - return subprocess.check_output(['python', file, '--help']).decode() - - -def main(): - parser = argparse.ArgumentParser() - parser.add_argument('source') - parser.add_argument('--destination', default='README.rst') - - args = parser.parse_args() - - source = os.path.abspath(args.source) - root = os.path.dirname(source) - destination = os.path.join(root, args.destination) - - jinja_env.globals['get_help'] = get_help - - with io.open(source, 'r') as f: - config = yaml.load(f) - - # This allows get_help to execute in the right directory. - os.chdir(root) - - output = README_TMPL.render(config) - - with io.open(destination, 'w') as f: - f.write(output) - - -if __name__ == '__main__': - main() diff --git a/scripts/readme-gen/templates/README.tmpl.rst b/scripts/readme-gen/templates/README.tmpl.rst deleted file mode 100644 index 4fd2397..0000000 --- a/scripts/readme-gen/templates/README.tmpl.rst +++ /dev/null @@ -1,87 +0,0 @@ -{# The following line is a lie. BUT! Once jinja2 is done with it, it will - become truth! #} -.. This file is automatically generated. Do not edit this file directly. - -{{product.name}} Python Samples -=============================================================================== - -.. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst - - -This directory contains samples for {{product.name}}. {{product.description}} - -{{description}} - -.. 
_{{product.name}}: {{product.url}} - -{% if required_api_url %} -To run the sample, you need to enable the API at: {{required_api_url}} -{% endif %} - -{% if required_role %} -To run the sample, you need to have `{{required_role}}` role. -{% endif %} - -{{other_required_steps}} - -{% if setup %} -Setup -------------------------------------------------------------------------------- - -{% for section in setup %} - -{% include section + '.tmpl.rst' %} - -{% endfor %} -{% endif %} - -{% if samples %} -Samples -------------------------------------------------------------------------------- - -{% for sample in samples %} -{{sample.name}} -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -{% if not sample.hide_cloudshell_button %} -.. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst -{% endif %} - - -{{sample.description}} - -To run this sample: - -.. code-block:: bash - - $ python {{sample.file}} -{% if sample.show_help %} - - {{get_help(sample.file)|indent}} -{% endif %} - - -{% endfor %} -{% endif %} - -{% if cloud_client_library %} - -The client library -------------------------------------------------------------------------------- - -This sample uses the `Google Cloud Client Library for Python`_. -You can read the documentation for more details on API usage and use GitHub -to `browse the source`_ and `report issues`_. - -.. _Google Cloud Client Library for Python: - https://googlecloudplatform.github.io/google-cloud-python/ -.. _browse the source: - https://github.com/GoogleCloudPlatform/google-cloud-python -.. _report issues: - https://github.com/GoogleCloudPlatform/google-cloud-python/issues - -{% endif %} - -.. 
_Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/scripts/readme-gen/templates/auth.tmpl.rst b/scripts/readme-gen/templates/auth.tmpl.rst deleted file mode 100644 index 1446b94..0000000 --- a/scripts/readme-gen/templates/auth.tmpl.rst +++ /dev/null @@ -1,9 +0,0 @@ -Authentication -++++++++++++++ - -This sample requires you to have authentication setup. Refer to the -`Authentication Getting Started Guide`_ for instructions on setting up -credentials for applications. - -.. _Authentication Getting Started Guide: - https://cloud.google.com/docs/authentication/getting-started diff --git a/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/scripts/readme-gen/templates/auth_api_key.tmpl.rst deleted file mode 100644 index 11957ce..0000000 --- a/scripts/readme-gen/templates/auth_api_key.tmpl.rst +++ /dev/null @@ -1,14 +0,0 @@ -Authentication -++++++++++++++ - -Authentication for this service is done via an `API Key`_. To obtain an API -Key: - -1. Open the `Cloud Platform Console`_ -2. Make sure that billing is enabled for your project. -3. From the **Credentials** page, create a new **API Key** or use an existing - one for your project. - -.. _API Key: - https://developers.google.com/api-client-library/python/guide/aaa_apikeys -.. _Cloud Console: https://console.cloud.google.com/project?_ diff --git a/scripts/readme-gen/templates/install_deps.tmpl.rst b/scripts/readme-gen/templates/install_deps.tmpl.rst deleted file mode 100644 index 6f069c6..0000000 --- a/scripts/readme-gen/templates/install_deps.tmpl.rst +++ /dev/null @@ -1,29 +0,0 @@ -Install Dependencies -++++++++++++++++++++ - -#. Clone python-docs-samples and change directory to the sample directory you want to use. - - .. code-block:: bash - - $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git - -#. Install `pip`_ and `virtualenv`_ if you do not already have them. 
You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. - - .. _Python Development Environment Setup Guide: - https://cloud.google.com/python/setup - -#. Create a virtualenv. Samples are compatible with Python 3.7+. - - .. code-block:: bash - - $ virtualenv env - $ source env/bin/activate - -#. Install the dependencies needed to run the samples. - - .. code-block:: bash - - $ pip install -r requirements.txt - -.. _pip: https://pip.pypa.io/ -.. _virtualenv: https://virtualenv.pypa.io/ diff --git a/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/scripts/readme-gen/templates/install_portaudio.tmpl.rst deleted file mode 100644 index 5ea33d1..0000000 --- a/scripts/readme-gen/templates/install_portaudio.tmpl.rst +++ /dev/null @@ -1,35 +0,0 @@ -Install PortAudio -+++++++++++++++++ - -Install `PortAudio`_. This is required by the `PyAudio`_ library to stream -audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the -platform. - -* For Mac OS X, you can use `Homebrew`_:: - - brew install portaudio - - **Note**: if you encounter an error when running `pip install` that indicates - it can't find `portaudio.h`, try running `pip install` with the following - flags:: - - pip install --global-option='build_ext' \ - --global-option='-I/usr/local/include' \ - --global-option='-L/usr/local/lib' \ - pyaudio - -* For Debian / Ubuntu Linux:: - - apt-get install portaudio19-dev python-all-dev - -* Windows may work without having to install PortAudio explicitly (it will get - installed with PyAudio). - -For more details, see the `PyAudio installation`_ page. - - -.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/ -.. _PortAudio: http://www.portaudio.com/ -.. _PyAudio installation: - https://people.csail.mit.edu/hubert/pyaudio/#downloads -.. 
_Homebrew: http://brew.sh diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index c3a2b39..0000000 --- a/setup.cfg +++ /dev/null @@ -1,19 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[bdist_wheel] -universal = 1 diff --git a/setup.py b/setup.py deleted file mode 100644 index a8ab221..0000000 --- a/setup.py +++ /dev/null @@ -1,90 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = "google-cloud-memcache" - - -description = "Google Cloud Memcache API client library" - -version = {} -with open(os.path.join(package_root, "google/cloud/memcache/gapic_version.py")) as fp: - exec(fp.read(), version) -version = version["__version__"] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "proto-plus >= 1.22.0, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -url = "https://github.com/googleapis/python-memcache" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.PEP420PackageFinder.find() - if package.startswith("google") -] - -namespaces = ["google", "google.cloud"] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Operating System :: OS 
Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - namespace_packages=namespaces, - install_requires=dependencies, - include_package_data=True, - zip_safe=False, -) diff --git a/testing/.gitignore b/testing/.gitignore deleted file mode 100644 index b05fbd6..0000000 --- a/testing/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -test-env.sh -service-account.json -client-secrets.json \ No newline at end of file diff --git a/testing/constraints-3.10.txt b/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9ae..0000000 --- a/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9ae..0000000 --- a/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/testing/constraints-3.12.txt b/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9ae..0000000 --- a/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt deleted file mode 100644 index 6c44adf..0000000 --- a/testing/constraints-3.7.txt +++ /dev/null @@ -1,9 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. 
-# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.0 -proto-plus==1.22.0 -protobuf==3.19.5 diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9ae..0000000 --- a/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9ae..0000000 --- a/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/tests/__init__.py b/tests/__init__.py deleted file mode 100644 index e8e1c38..0000000 --- a/tests/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py deleted file mode 100644 index e8e1c38..0000000 --- a/tests/unit/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py deleted file mode 100644 index e8e1c38..0000000 --- a/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/tests/unit/gapic/memcache_v1/__init__.py b/tests/unit/gapic/memcache_v1/__init__.py deleted file mode 100644 index e8e1c38..0000000 --- a/tests/unit/gapic/memcache_v1/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py deleted file mode 100644 index 65c1125..0000000 --- a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py +++ /dev/null @@ -1,7415 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os - -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -from collections.abc import Iterable -import json -import math - -from google.api_core import ( - future, - gapic_v1, - grpc_helpers, - grpc_helpers_async, - operation, - operations_v1, - path_template, -) -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import operation_async # type: ignore -import google.auth -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.location import locations_pb2 -from google.longrunning import operations_pb2 -from google.oauth2 import service_account -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import json_format -from google.protobuf import timestamp_pb2 # type: ignore -from google.type import dayofweek_pb2 # type: ignore -from google.type import timeofday_pb2 # type: ignore -import grpc -from grpc.experimental import aio -from proto.marshal.rules import wrappers -from proto.marshal.rules.dates import DurationRule, TimestampRule -import pytest -from requests import PreparedRequest, Request, Response -from requests.sessions import Session - -from google.cloud.memcache_v1.services.cloud_memcache import ( - CloudMemcacheAsyncClient, - CloudMemcacheClient, - pagers, - transports, -) -from google.cloud.memcache_v1.types import cloud_memcache - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. 
-# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return ( - "foo.googleapis.com" - if ("localhost" in client.DEFAULT_ENDPOINT) - else client.DEFAULT_ENDPOINT - ) - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert CloudMemcacheClient._get_default_mtls_endpoint(None) is None - assert ( - CloudMemcacheClient._get_default_mtls_endpoint(api_endpoint) - == api_mtls_endpoint - ) - assert ( - CloudMemcacheClient._get_default_mtls_endpoint(api_mtls_endpoint) - == api_mtls_endpoint - ) - assert ( - CloudMemcacheClient._get_default_mtls_endpoint(sandbox_endpoint) - == sandbox_mtls_endpoint - ) - assert ( - CloudMemcacheClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) - == sandbox_mtls_endpoint - ) - assert ( - CloudMemcacheClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - ) - - -@pytest.mark.parametrize( - "client_class,transport_name", - [ - (CloudMemcacheClient, "grpc"), - (CloudMemcacheAsyncClient, "grpc_asyncio"), - (CloudMemcacheClient, "rest"), - ], -) -def test_cloud_memcache_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object( - service_account.Credentials, "from_service_account_info" - ) as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - "memcache.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://memcache.googleapis.com" - ) - - 
-@pytest.mark.parametrize( - "transport_class,transport_name", - [ - (transports.CloudMemcacheGrpcTransport, "grpc"), - (transports.CloudMemcacheGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.CloudMemcacheRestTransport, "rest"), - ], -) -def test_cloud_memcache_client_service_account_always_use_jwt( - transport_class, transport_name -): - with mock.patch.object( - service_account.Credentials, "with_always_use_jwt_access", create=True - ) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object( - service_account.Credentials, "with_always_use_jwt_access", create=True - ) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize( - "client_class,transport_name", - [ - (CloudMemcacheClient, "grpc"), - (CloudMemcacheAsyncClient, "grpc_asyncio"), - (CloudMemcacheClient, "rest"), - ], -) -def test_cloud_memcache_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object( - service_account.Credentials, "from_service_account_file" - ) as factory: - factory.return_value = creds - client = client_class.from_service_account_file( - "dummy/file/path.json", transport=transport_name - ) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json( - "dummy/file/path.json", transport=transport_name - ) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - "memcache.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://memcache.googleapis.com" - ) - - -def test_cloud_memcache_client_get_transport_class(): - transport 
= CloudMemcacheClient.get_transport_class() - available_transports = [ - transports.CloudMemcacheGrpcTransport, - transports.CloudMemcacheRestTransport, - ] - assert transport in available_transports - - transport = CloudMemcacheClient.get_transport_class("grpc") - assert transport == transports.CloudMemcacheGrpcTransport - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (CloudMemcacheClient, transports.CloudMemcacheGrpcTransport, "grpc"), - ( - CloudMemcacheAsyncClient, - transports.CloudMemcacheGrpcAsyncIOTransport, - "grpc_asyncio", - ), - (CloudMemcacheClient, transports.CloudMemcacheRestTransport, "rest"), - ], -) -@mock.patch.object( - CloudMemcacheClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(CloudMemcacheClient), -) -@mock.patch.object( - CloudMemcacheAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(CloudMemcacheAsyncClient), -) -def test_cloud_memcache_client_client_options( - client_class, transport_class, transport_name -): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(CloudMemcacheClient, "get_transport_class") as gtc: - transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(CloudMemcacheClient, "get_transport_class") as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. 
- options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} - ): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions( - api_audience="https://language.googleapis.com" - ) - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com", - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,use_client_cert_env", - [ - (CloudMemcacheClient, transports.CloudMemcacheGrpcTransport, "grpc", "true"), - ( - CloudMemcacheAsyncClient, - transports.CloudMemcacheGrpcAsyncIOTransport, - "grpc_asyncio", - "true", - ), - (CloudMemcacheClient, 
transports.CloudMemcacheGrpcTransport, "grpc", "false"), - ( - CloudMemcacheAsyncClient, - transports.CloudMemcacheGrpcAsyncIOTransport, - "grpc_asyncio", - "false", - ), - (CloudMemcacheClient, transports.CloudMemcacheRestTransport, "rest", "true"), - (CloudMemcacheClient, transports.CloudMemcacheRestTransport, "rest", "false"), - ], -) -@mock.patch.object( - CloudMemcacheClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(CloudMemcacheClient), -) -@mock.patch.object( - CloudMemcacheAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(CloudMemcacheAsyncClient), -) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_cloud_memcache_client_mtls_env_auto( - client_class, transport_class, transport_name, use_client_cert_env -): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - options = client_options.ClientOptions( - client_cert_source=client_cert_source_callback - ) - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=True, - ): - with mock.patch( - "google.auth.transport.mtls.default_client_cert_source", - return_value=client_cert_source_callback, - ): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=False, - ): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient] -) -@mock.patch.object( - CloudMemcacheClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(CloudMemcacheClient), -) -@mock.patch.object( - CloudMemcacheAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(CloudMemcacheAsyncClient), -) -def test_cloud_memcache_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint - ) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( - options - ) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint - ) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( - options - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=False, - ): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=True, - ): - with mock.patch( - "google.auth.transport.mtls.default_client_cert_source", - return_value=mock_client_cert_source, - ): - ( - api_endpoint, - cert_source, - ) = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (CloudMemcacheClient, transports.CloudMemcacheGrpcTransport, "grpc"), - ( - CloudMemcacheAsyncClient, - transports.CloudMemcacheGrpcAsyncIOTransport, - "grpc_asyncio", - ), - (CloudMemcacheClient, transports.CloudMemcacheRestTransport, "rest"), - ], -) -def test_cloud_memcache_client_client_options_scopes( - client_class, transport_class, transport_name -): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - ( - CloudMemcacheClient, - transports.CloudMemcacheGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - CloudMemcacheAsyncClient, - transports.CloudMemcacheGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - (CloudMemcacheClient, transports.CloudMemcacheRestTransport, "rest", None), - ], -) -def 
test_cloud_memcache_client_client_options_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. - options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -def test_cloud_memcache_client_client_options_from_dict(): - with mock.patch( - "google.cloud.memcache_v1.services.cloud_memcache.transports.CloudMemcacheGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = CloudMemcacheClient( - client_options={"api_endpoint": "squid.clam.whelk"} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - ( - CloudMemcacheClient, - transports.CloudMemcacheGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - CloudMemcacheAsyncClient, - transports.CloudMemcacheGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - ], -) -def test_cloud_memcache_client_create_channel_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "memcache.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=None, - default_host="memcache.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.ListInstancesRequest, - dict, - ], -) -def test_list_instances(request_type, transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = cloud_memcache.ListInstancesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - response = client.list_instances(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ListInstancesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] - - -def test_list_instances_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: - client.list_instances() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ListInstancesRequest() - - -@pytest.mark.asyncio -async def test_list_instances_async( - transport: str = "grpc_asyncio", request_type=cloud_memcache.ListInstancesRequest -): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_instances), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_memcache.ListInstancesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - ) - response = await client.list_instances(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ListInstancesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstancesAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] - - -@pytest.mark.asyncio -async def test_list_instances_async_from_dict(): - await test_list_instances_async(request_type=dict) - - -def test_list_instances_field_headers(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.ListInstancesRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: - call.return_value = cloud_memcache.ListInstancesResponse() - client.list_instances(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_instances_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.ListInstancesRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_memcache.ListInstancesResponse() - ) - await client.list_instances(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -def test_list_instances_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = cloud_memcache.ListInstancesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_instances( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -def test_list_instances_flattened_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_instances( - cloud_memcache.ListInstancesRequest(), - parent="parent_value", - ) - - -@pytest.mark.asyncio -async def test_list_instances_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = cloud_memcache.ListInstancesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_memcache.ListInstancesResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_instances( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_list_instances_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_instances( - cloud_memcache.ListInstancesRequest(), - parent="parent_value", - ) - - -def test_list_instances_pager(transport_name: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - cloud_memcache.ListInstancesResponse( - instances=[ - cloud_memcache.Instance(), - cloud_memcache.Instance(), - cloud_memcache.Instance(), - ], - next_page_token="abc", - ), - cloud_memcache.ListInstancesResponse( - instances=[], - next_page_token="def", - ), - cloud_memcache.ListInstancesResponse( - instances=[ - cloud_memcache.Instance(), - ], - next_page_token="ghi", - ), - cloud_memcache.ListInstancesResponse( - instances=[ - cloud_memcache.Instance(), - cloud_memcache.Instance(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_instances(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloud_memcache.Instance) for i in results) - - -def test_list_instances_pages(transport_name: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloud_memcache.ListInstancesResponse( - instances=[ - cloud_memcache.Instance(), - cloud_memcache.Instance(), - cloud_memcache.Instance(), - ], - next_page_token="abc", - ), - cloud_memcache.ListInstancesResponse( - instances=[], - next_page_token="def", - ), - cloud_memcache.ListInstancesResponse( - instances=[ - cloud_memcache.Instance(), - ], - next_page_token="ghi", - ), - cloud_memcache.ListInstancesResponse( - instances=[ - cloud_memcache.Instance(), - cloud_memcache.Instance(), - ], - ), - RuntimeError, - ) - pages = list(client.list_instances(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_instances_async_pager(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloud_memcache.ListInstancesResponse( - instances=[ - cloud_memcache.Instance(), - cloud_memcache.Instance(), - cloud_memcache.Instance(), - ], - next_page_token="abc", - ), - cloud_memcache.ListInstancesResponse( - instances=[], - next_page_token="def", - ), - cloud_memcache.ListInstancesResponse( - instances=[ - cloud_memcache.Instance(), - ], - next_page_token="ghi", - ), - cloud_memcache.ListInstancesResponse( - instances=[ - cloud_memcache.Instance(), - cloud_memcache.Instance(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_instances( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, cloud_memcache.Instance) for i in responses) - - -@pytest.mark.asyncio -async def test_list_instances_async_pages(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloud_memcache.ListInstancesResponse( - instances=[ - cloud_memcache.Instance(), - cloud_memcache.Instance(), - cloud_memcache.Instance(), - ], - next_page_token="abc", - ), - cloud_memcache.ListInstancesResponse( - instances=[], - next_page_token="def", - ), - cloud_memcache.ListInstancesResponse( - instances=[ - cloud_memcache.Instance(), - ], - next_page_token="ghi", - ), - cloud_memcache.ListInstancesResponse( - instances=[ - cloud_memcache.Instance(), - cloud_memcache.Instance(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in ( - await client.list_instances(request={}) - ).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.GetInstanceRequest, - dict, - ], -) -def test_get_instance(request_type, transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = cloud_memcache.Instance( - name="name_value", - display_name="display_name_value", - authorized_network="authorized_network_value", - zones=["zones_value"], - node_count=1070, - memcache_version=cloud_memcache.MemcacheVersion.MEMCACHE_1_5, - state=cloud_memcache.Instance.State.CREATING, - memcache_full_version="memcache_full_version_value", - discovery_endpoint="discovery_endpoint_value", - ) - response = client.get_instance(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.GetInstanceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, cloud_memcache.Instance) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.authorized_network == "authorized_network_value" - assert response.zones == ["zones_value"] - assert response.node_count == 1070 - assert response.memcache_version == cloud_memcache.MemcacheVersion.MEMCACHE_1_5 - assert response.state == cloud_memcache.Instance.State.CREATING - assert response.memcache_full_version == "memcache_full_version_value" - assert response.discovery_endpoint == "discovery_endpoint_value" - - -def test_get_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_instance), "__call__") as call: - client.get_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.GetInstanceRequest() - - -@pytest.mark.asyncio -async def test_get_instance_async( - transport: str = "grpc_asyncio", request_type=cloud_memcache.GetInstanceRequest -): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_memcache.Instance( - name="name_value", - display_name="display_name_value", - authorized_network="authorized_network_value", - zones=["zones_value"], - node_count=1070, - memcache_version=cloud_memcache.MemcacheVersion.MEMCACHE_1_5, - state=cloud_memcache.Instance.State.CREATING, - memcache_full_version="memcache_full_version_value", - discovery_endpoint="discovery_endpoint_value", - ) - ) - response = await client.get_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.GetInstanceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, cloud_memcache.Instance) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.authorized_network == "authorized_network_value" - assert response.zones == ["zones_value"] - assert response.node_count == 1070 - assert response.memcache_version == cloud_memcache.MemcacheVersion.MEMCACHE_1_5 - assert response.state == cloud_memcache.Instance.State.CREATING - assert response.memcache_full_version == "memcache_full_version_value" - assert response.discovery_endpoint == "discovery_endpoint_value" - - -@pytest.mark.asyncio -async def test_get_instance_async_from_dict(): - await test_get_instance_async(request_type=dict) - - -def test_get_instance_field_headers(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = cloud_memcache.GetInstanceRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_instance), "__call__") as call: - call.return_value = cloud_memcache.Instance() - client.get_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_instance_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.GetInstanceRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_instance), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_memcache.Instance() - ) - await client.get_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_get_instance_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_instance), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = cloud_memcache.Instance() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_instance( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_get_instance_flattened_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_instance( - cloud_memcache.GetInstanceRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_get_instance_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = cloud_memcache.Instance() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_memcache.Instance() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_instance( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_get_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_instance( - cloud_memcache.GetInstanceRequest(), - name="name_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.CreateInstanceRequest, - dict, - ], -) -def test_create_instance(request_type, transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.CreateInstanceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_instance), "__call__") as call: - client.create_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.CreateInstanceRequest() - - -@pytest.mark.asyncio -async def test_create_instance_async( - transport: str = "grpc_asyncio", request_type=cloud_memcache.CreateInstanceRequest -): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.create_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.CreateInstanceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_instance_async_from_dict(): - await test_create_instance_async(request_type=dict) - - -def test_create_instance_field_headers(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.CreateInstanceRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_instance), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_create_instance_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.CreateInstanceRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_instance), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.create_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -def test_create_instance_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_instance), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_instance( - parent="parent_value", - instance=cloud_memcache.Instance(name="name_value"), - instance_id="instance_id_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].instance - mock_val = cloud_memcache.Instance(name="name_value") - assert arg == mock_val - arg = args[0].instance_id - mock_val = "instance_id_value" - assert arg == mock_val - - -def test_create_instance_flattened_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_instance( - cloud_memcache.CreateInstanceRequest(), - parent="parent_value", - instance=cloud_memcache.Instance(name="name_value"), - instance_id="instance_id_value", - ) - - -@pytest.mark.asyncio -async def test_create_instance_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.create_instance( - parent="parent_value", - instance=cloud_memcache.Instance(name="name_value"), - instance_id="instance_id_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].instance - mock_val = cloud_memcache.Instance(name="name_value") - assert arg == mock_val - arg = args[0].instance_id - mock_val = "instance_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_instance( - cloud_memcache.CreateInstanceRequest(), - parent="parent_value", - instance=cloud_memcache.Instance(name="name_value"), - instance_id="instance_id_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.UpdateInstanceRequest, - dict, - ], -) -def test_update_instance(request_type, transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateInstanceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_instance), "__call__") as call: - client.update_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateInstanceRequest() - - -@pytest.mark.asyncio -async def test_update_instance_async( - transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateInstanceRequest -): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateInstanceRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_instance_async_from_dict(): - await test_update_instance_async(request_type=dict) - - -def test_update_instance_field_headers(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.UpdateInstanceRequest() - - request.instance.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_instance), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "instance.name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_update_instance_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.UpdateInstanceRequest() - - request.instance.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_instance), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "instance.name=name_value", - ) in kw["metadata"] - - -def test_update_instance_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_instance( - instance=cloud_memcache.Instance(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].instance - mock_val = cloud_memcache.Instance(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -def test_update_instance_flattened_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_instance( - cloud_memcache.UpdateInstanceRequest(), - instance=cloud_memcache.Instance(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -@pytest.mark.asyncio -async def test_update_instance_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_instance( - instance=cloud_memcache.Instance(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].instance - mock_val = cloud_memcache.Instance(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_instance( - cloud_memcache.UpdateInstanceRequest(), - instance=cloud_memcache.Instance(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.UpdateParametersRequest, - dict, - ], -) -def test_update_parameters(request_type, transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_parameters), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_parameters(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateParametersRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_parameters_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_parameters), "__call__" - ) as call: - client.update_parameters() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateParametersRequest() - - -@pytest.mark.asyncio -async def test_update_parameters_async( - transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateParametersRequest -): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_parameters), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.update_parameters(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateParametersRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_parameters_async_from_dict(): - await test_update_parameters_async(request_type=dict) - - -def test_update_parameters_field_headers(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.UpdateParametersRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_parameters), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_parameters(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_update_parameters_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.UpdateParametersRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_parameters), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.update_parameters(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_update_parameters_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_parameters), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_parameters( - name="name_value", - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - parameters=cloud_memcache.MemcacheParameters(id="id_value"), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - arg = args[0].parameters - mock_val = cloud_memcache.MemcacheParameters(id="id_value") - assert arg == mock_val - - -def test_update_parameters_flattened_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_parameters( - cloud_memcache.UpdateParametersRequest(), - name="name_value", - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - parameters=cloud_memcache.MemcacheParameters(id="id_value"), - ) - - -@pytest.mark.asyncio -async def test_update_parameters_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_parameters), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_parameters( - name="name_value", - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - parameters=cloud_memcache.MemcacheParameters(id="id_value"), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - arg = args[0].parameters - mock_val = cloud_memcache.MemcacheParameters(id="id_value") - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_parameters_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_parameters( - cloud_memcache.UpdateParametersRequest(), - name="name_value", - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - parameters=cloud_memcache.MemcacheParameters(id="id_value"), - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.DeleteInstanceRequest, - dict, - ], -) -def test_delete_instance(request_type, transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.DeleteInstanceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: - client.delete_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.DeleteInstanceRequest() - - -@pytest.mark.asyncio -async def test_delete_instance_async( - transport: str = "grpc_asyncio", request_type=cloud_memcache.DeleteInstanceRequest -): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.delete_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.DeleteInstanceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_instance_async_from_dict(): - await test_delete_instance_async(request_type=dict) - - -def test_delete_instance_field_headers(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.DeleteInstanceRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_delete_instance_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.DeleteInstanceRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.delete_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_delete_instance_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_instance( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_delete_instance_flattened_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_instance( - cloud_memcache.DeleteInstanceRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_delete_instance_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_instance( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_delete_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_instance( - cloud_memcache.DeleteInstanceRequest(), - name="name_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.ApplyParametersRequest, - dict, - ], -) -def test_apply_parameters(request_type, transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.apply_parameters(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ApplyParametersRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_apply_parameters_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: - client.apply_parameters() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ApplyParametersRequest() - - -@pytest.mark.asyncio -async def test_apply_parameters_async( - transport: str = "grpc_asyncio", request_type=cloud_memcache.ApplyParametersRequest -): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.apply_parameters(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ApplyParametersRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_apply_parameters_async_from_dict(): - await test_apply_parameters_async(request_type=dict) - - -def test_apply_parameters_field_headers(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.ApplyParametersRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.apply_parameters(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_apply_parameters_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.ApplyParametersRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.apply_parameters(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_apply_parameters_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.apply_parameters( - name="name_value", - node_ids=["node_ids_value"], - apply_all=True, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - arg = args[0].node_ids - mock_val = ["node_ids_value"] - assert arg == mock_val - arg = args[0].apply_all - mock_val = True - assert arg == mock_val - - -def test_apply_parameters_flattened_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.apply_parameters( - cloud_memcache.ApplyParametersRequest(), - name="name_value", - node_ids=["node_ids_value"], - apply_all=True, - ) - - -@pytest.mark.asyncio -async def test_apply_parameters_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.apply_parameters( - name="name_value", - node_ids=["node_ids_value"], - apply_all=True, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - arg = args[0].node_ids - mock_val = ["node_ids_value"] - assert arg == mock_val - arg = args[0].apply_all - mock_val = True - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_apply_parameters_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.apply_parameters( - cloud_memcache.ApplyParametersRequest(), - name="name_value", - node_ids=["node_ids_value"], - apply_all=True, - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.RescheduleMaintenanceRequest, - dict, - ], -) -def test_reschedule_maintenance(request_type, transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reschedule_maintenance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.reschedule_maintenance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.RescheduleMaintenanceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_reschedule_maintenance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.reschedule_maintenance), "__call__" - ) as call: - client.reschedule_maintenance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.RescheduleMaintenanceRequest() - - -@pytest.mark.asyncio -async def test_reschedule_maintenance_async( - transport: str = "grpc_asyncio", - request_type=cloud_memcache.RescheduleMaintenanceRequest, -): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reschedule_maintenance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.reschedule_maintenance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.RescheduleMaintenanceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_reschedule_maintenance_async_from_dict(): - await test_reschedule_maintenance_async(request_type=dict) - - -def test_reschedule_maintenance_field_headers(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.RescheduleMaintenanceRequest() - - request.instance = "instance_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.reschedule_maintenance), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.reschedule_maintenance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "instance=instance_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_reschedule_maintenance_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.RescheduleMaintenanceRequest() - - request.instance = "instance_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reschedule_maintenance), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.reschedule_maintenance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "instance=instance_value", - ) in kw["metadata"] - - -def test_reschedule_maintenance_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reschedule_maintenance), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.reschedule_maintenance( - instance="instance_value", - reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, - schedule_time=timestamp_pb2.Timestamp(seconds=751), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].instance - mock_val = "instance_value" - assert arg == mock_val - arg = args[0].reschedule_type - mock_val = cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE - assert arg == mock_val - assert TimestampRule().to_proto( - args[0].schedule_time - ) == timestamp_pb2.Timestamp(seconds=751) - - -def test_reschedule_maintenance_flattened_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.reschedule_maintenance( - cloud_memcache.RescheduleMaintenanceRequest(), - instance="instance_value", - reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, - schedule_time=timestamp_pb2.Timestamp(seconds=751), - ) - - -@pytest.mark.asyncio -async def test_reschedule_maintenance_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reschedule_maintenance), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.reschedule_maintenance( - instance="instance_value", - reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, - schedule_time=timestamp_pb2.Timestamp(seconds=751), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].instance - mock_val = "instance_value" - assert arg == mock_val - arg = args[0].reschedule_type - mock_val = cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE - assert arg == mock_val - assert TimestampRule().to_proto( - args[0].schedule_time - ) == timestamp_pb2.Timestamp(seconds=751) - - -@pytest.mark.asyncio -async def test_reschedule_maintenance_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.reschedule_maintenance( - cloud_memcache.RescheduleMaintenanceRequest(), - instance="instance_value", - reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, - schedule_time=timestamp_pb2.Timestamp(seconds=751), - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.ListInstancesRequest, - dict, - ], -) -def test_list_instances_rest(request_type): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = cloud_memcache.ListInstancesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloud_memcache.ListInstancesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_instances(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] - - -def test_list_instances_rest_required_fields( - request_type=cloud_memcache.ListInstancesRequest, -): - transport_class = transports.CloudMemcacheRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_instances._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_instances._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = cloud_memcache.ListInstancesResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = cloud_memcache.ListInstancesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_instances(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_instances_rest_unset_required_fields(): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_instances._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_instances_rest_interceptors(null_interceptor): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.CloudMemcacheRestInterceptor(), - ) - client = CloudMemcacheClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as 
transcode, mock.patch.object( - transports.CloudMemcacheRestInterceptor, "post_list_instances" - ) as post, mock.patch.object( - transports.CloudMemcacheRestInterceptor, "pre_list_instances" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_memcache.ListInstancesRequest.pb( - cloud_memcache.ListInstancesRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloud_memcache.ListInstancesResponse.to_json( - cloud_memcache.ListInstancesResponse() - ) - - request = cloud_memcache.ListInstancesRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloud_memcache.ListInstancesResponse() - - client.list_instances( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_instances_rest_bad_request( - transport: str = "rest", request_type=cloud_memcache.ListInstancesRequest -): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_instances(request) - - -def test_list_instances_rest_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = cloud_memcache.ListInstancesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloud_memcache.ListInstancesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_instances(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, - args[1], - ) - - -def test_list_instances_rest_flattened_error(transport: str = "rest"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_instances( - cloud_memcache.ListInstancesRequest(), - parent="parent_value", - ) - - -def test_list_instances_rest_pager(transport: str = "rest"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - cloud_memcache.ListInstancesResponse( - instances=[ - cloud_memcache.Instance(), - cloud_memcache.Instance(), - cloud_memcache.Instance(), - ], - next_page_token="abc", - ), - cloud_memcache.ListInstancesResponse( - instances=[], - next_page_token="def", - ), - cloud_memcache.ListInstancesResponse( - instances=[ - cloud_memcache.Instance(), - ], - next_page_token="ghi", - ), - cloud_memcache.ListInstancesResponse( - instances=[ - cloud_memcache.Instance(), - cloud_memcache.Instance(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - cloud_memcache.ListInstancesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_instances(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloud_memcache.Instance) for i in results) - - pages = list(client.list_instances(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token 
== token - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.GetInstanceRequest, - dict, - ], -) -def test_get_instance_rest(request_type): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = cloud_memcache.Instance( - name="name_value", - display_name="display_name_value", - authorized_network="authorized_network_value", - zones=["zones_value"], - node_count=1070, - memcache_version=cloud_memcache.MemcacheVersion.MEMCACHE_1_5, - state=cloud_memcache.Instance.State.CREATING, - memcache_full_version="memcache_full_version_value", - discovery_endpoint="discovery_endpoint_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloud_memcache.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_instance(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_memcache.Instance) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.authorized_network == "authorized_network_value" - assert response.zones == ["zones_value"] - assert response.node_count == 1070 - assert response.memcache_version == cloud_memcache.MemcacheVersion.MEMCACHE_1_5 - assert response.state == cloud_memcache.Instance.State.CREATING - assert response.memcache_full_version == "memcache_full_version_value" - assert response.discovery_endpoint == "discovery_endpoint_value" - - -def test_get_instance_rest_required_fields( - request_type=cloud_memcache.GetInstanceRequest, -): - transport_class = transports.CloudMemcacheRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = cloud_memcache.Instance() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = cloud_memcache.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_instance(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_instance_rest_unset_required_fields(): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_instance_rest_interceptors(null_interceptor): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.CloudMemcacheRestInterceptor(), - ) - client = CloudMemcacheClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, 
"transcode" - ) as transcode, mock.patch.object( - transports.CloudMemcacheRestInterceptor, "post_get_instance" - ) as post, mock.patch.object( - transports.CloudMemcacheRestInterceptor, "pre_get_instance" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_memcache.GetInstanceRequest.pb( - cloud_memcache.GetInstanceRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloud_memcache.Instance.to_json( - cloud_memcache.Instance() - ) - - request = cloud_memcache.GetInstanceRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloud_memcache.Instance() - - client.get_instance( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_instance_rest_bad_request( - transport: str = "rest", request_type=cloud_memcache.GetInstanceRequest -): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_instance(request) - - -def test_get_instance_rest_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = cloud_memcache.Instance() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/instances/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloud_memcache.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, - args[1], - ) - - -def test_get_instance_rest_flattened_error(transport: str = "rest"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_instance( - cloud_memcache.GetInstanceRequest(), - name="name_value", - ) - - -def test_get_instance_rest_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.CreateInstanceRequest, - dict, - ], -) -def test_create_instance_rest(request_type): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["instance"] = { - "name": "name_value", - "display_name": "display_name_value", - "labels": {}, - "authorized_network": "authorized_network_value", - "zones": ["zones_value1", "zones_value2"], - "node_count": 1070, - "node_config": {"cpu_count": 976, "memory_size_mb": 1505}, - "memcache_version": 1, - "parameters": {"id": "id_value", "params": {}}, - "memcache_nodes": [ - { - "node_id": "node_id_value", - "zone": "zone_value", - "state": 1, - "host": "host_value", - "port": 453, - "parameters": {}, - } - ], - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "state": 1, - "memcache_full_version": "memcache_full_version_value", - "instance_messages": [{"code": 1, "message": "message_value"}], - "discovery_endpoint": "discovery_endpoint_value", - "maintenance_policy": { - "create_time": {}, - "update_time": {}, - "description": "description_value", - "weekly_maintenance_window": [ - { - "day": 1, - "start_time": { - "hours": 561, - "minutes": 773, - "seconds": 751, - "nanos": 543, - }, - "duration": {"seconds": 751, "nanos": 543}, - } - ], - }, - "maintenance_schedule": { - "start_time": {}, - "end_time": {}, - "schedule_deadline_time": {}, - }, - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_instance(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - - -def test_create_instance_rest_required_fields( - request_type=cloud_memcache.CreateInstanceRequest, -): - transport_class = transports.CloudMemcacheRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["instance_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) - ) - - # verify fields with default values are dropped - assert "instanceId" not in jsonified_request - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "instanceId" in jsonified_request - assert jsonified_request["instanceId"] == request_init["instance_id"] - - jsonified_request["parent"] = "parent_value" - jsonified_request["instanceId"] = "instance_id_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_instance._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("instance_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "instanceId" in jsonified_request - assert jsonified_request["instanceId"] == "instance_id_value" - - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_instance(request) - - expected_params = [ - ( - "instanceId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_instance_rest_unset_required_fields(): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_instance._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("instanceId",)) - & set( - ( - "parent", - "instanceId", - "instance", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_instance_rest_interceptors(null_interceptor): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.CloudMemcacheRestInterceptor(), - ) - client = CloudMemcacheClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudMemcacheRestInterceptor, "post_create_instance" - ) as post, mock.patch.object( - transports.CloudMemcacheRestInterceptor, "pre_create_instance" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_memcache.CreateInstanceRequest.pb( - 
cloud_memcache.CreateInstanceRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - - request = cloud_memcache.CreateInstanceRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.create_instance( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_instance_rest_bad_request( - transport: str = "rest", request_type=cloud_memcache.CreateInstanceRequest -): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["instance"] = { - "name": "name_value", - "display_name": "display_name_value", - "labels": {}, - "authorized_network": "authorized_network_value", - "zones": ["zones_value1", "zones_value2"], - "node_count": 1070, - "node_config": {"cpu_count": 976, "memory_size_mb": 1505}, - "memcache_version": 1, - "parameters": {"id": "id_value", "params": {}}, - "memcache_nodes": [ - { - "node_id": "node_id_value", - "zone": "zone_value", - "state": 1, - "host": "host_value", - "port": 453, - "parameters": {}, - } - ], - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "state": 1, - "memcache_full_version": "memcache_full_version_value", - "instance_messages": [{"code": 1, "message": "message_value"}], - "discovery_endpoint": "discovery_endpoint_value", - "maintenance_policy": { - "create_time": {}, - "update_time": {}, - "description": "description_value", - 
"weekly_maintenance_window": [ - { - "day": 1, - "start_time": { - "hours": 561, - "minutes": 773, - "seconds": 751, - "nanos": 543, - }, - "duration": {"seconds": 751, "nanos": 543}, - } - ], - }, - "maintenance_schedule": { - "start_time": {}, - "end_time": {}, - "schedule_deadline_time": {}, - }, - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_instance(request) - - -def test_create_instance_rest_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - instance=cloud_memcache.Instance(name="name_value"), - instance_id="instance_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, - args[1], - ) - - -def test_create_instance_rest_flattened_error(transport: str = "rest"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_instance( - cloud_memcache.CreateInstanceRequest(), - parent="parent_value", - instance=cloud_memcache.Instance(name="name_value"), - instance_id="instance_id_value", - ) - - -def test_create_instance_rest_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.UpdateInstanceRequest, - dict, - ], -) -def test_update_instance_rest(request_type): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} - } - request_init["instance"] = { - "name": "projects/sample1/locations/sample2/instances/sample3", - "display_name": "display_name_value", - "labels": {}, - "authorized_network": "authorized_network_value", - "zones": ["zones_value1", "zones_value2"], - "node_count": 1070, - "node_config": {"cpu_count": 976, "memory_size_mb": 1505}, - "memcache_version": 1, - "parameters": {"id": "id_value", "params": {}}, - "memcache_nodes": [ - { - "node_id": "node_id_value", - "zone": "zone_value", - "state": 1, - "host": "host_value", - "port": 453, - "parameters": {}, - } - ], - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "state": 1, - "memcache_full_version": "memcache_full_version_value", - 
"instance_messages": [{"code": 1, "message": "message_value"}], - "discovery_endpoint": "discovery_endpoint_value", - "maintenance_policy": { - "create_time": {}, - "update_time": {}, - "description": "description_value", - "weekly_maintenance_window": [ - { - "day": 1, - "start_time": { - "hours": 561, - "minutes": 773, - "seconds": 751, - "nanos": 543, - }, - "duration": {"seconds": 751, "nanos": 543}, - } - ], - }, - "maintenance_schedule": { - "start_time": {}, - "end_time": {}, - "schedule_deadline_time": {}, - }, - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_instance(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_update_instance_rest_required_fields( - request_type=cloud_memcache.UpdateInstanceRequest, -): - transport_class = transports.CloudMemcacheRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_instance._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.update_instance(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_instance_rest_unset_required_fields(): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_instance._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "updateMask", - "instance", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_instance_rest_interceptors(null_interceptor): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.CloudMemcacheRestInterceptor(), - ) - client = CloudMemcacheClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudMemcacheRestInterceptor, "post_update_instance" - ) as post, mock.patch.object( - transports.CloudMemcacheRestInterceptor, "pre_update_instance" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_memcache.UpdateInstanceRequest.pb( - cloud_memcache.UpdateInstanceRequest() - ) - transcode.return_value = { - 
"method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - - request = cloud_memcache.UpdateInstanceRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.update_instance( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_instance_rest_bad_request( - transport: str = "rest", request_type=cloud_memcache.UpdateInstanceRequest -): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} - } - request_init["instance"] = { - "name": "projects/sample1/locations/sample2/instances/sample3", - "display_name": "display_name_value", - "labels": {}, - "authorized_network": "authorized_network_value", - "zones": ["zones_value1", "zones_value2"], - "node_count": 1070, - "node_config": {"cpu_count": 976, "memory_size_mb": 1505}, - "memcache_version": 1, - "parameters": {"id": "id_value", "params": {}}, - "memcache_nodes": [ - { - "node_id": "node_id_value", - "zone": "zone_value", - "state": 1, - "host": "host_value", - "port": 453, - "parameters": {}, - } - ], - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "state": 1, - "memcache_full_version": "memcache_full_version_value", - "instance_messages": [{"code": 1, "message": "message_value"}], - "discovery_endpoint": "discovery_endpoint_value", - "maintenance_policy": { - "create_time": {}, - "update_time": {}, - "description": "description_value", 
- "weekly_maintenance_window": [ - { - "day": 1, - "start_time": { - "hours": 561, - "minutes": 773, - "seconds": 751, - "nanos": 543, - }, - "duration": {"seconds": 751, "nanos": 543}, - } - ], - }, - "maintenance_schedule": { - "start_time": {}, - "end_time": {}, - "schedule_deadline_time": {}, - }, - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_instance(request) - - -def test_update_instance_rest_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} - } - - # get truthy value for each flattened field - mock_args = dict( - instance=cloud_memcache.Instance(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{instance.name=projects/*/locations/*/instances/*}" - % client.transport._host, - args[1], - ) - - -def test_update_instance_rest_flattened_error(transport: str = "rest"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_instance( - cloud_memcache.UpdateInstanceRequest(), - instance=cloud_memcache.Instance(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_update_instance_rest_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.UpdateParametersRequest, - dict, - ], -) -def test_update_parameters_rest(request_type): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_parameters(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_update_parameters_rest_required_fields( - request_type=cloud_memcache.UpdateParametersRequest, -): - transport_class = transports.CloudMemcacheRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_parameters._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_parameters._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.update_parameters(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_parameters_rest_unset_required_fields(): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_parameters._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "name", - "updateMask", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_parameters_rest_interceptors(null_interceptor): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.CloudMemcacheRestInterceptor(), - ) - client = CloudMemcacheClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudMemcacheRestInterceptor, "post_update_parameters" - ) as post, mock.patch.object( - transports.CloudMemcacheRestInterceptor, "pre_update_parameters" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_memcache.UpdateParametersRequest.pb( - cloud_memcache.UpdateParametersRequest() - ) - transcode.return_value = { - 
"method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - - request = cloud_memcache.UpdateParametersRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.update_parameters( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_parameters_rest_bad_request( - transport: str = "rest", request_type=cloud_memcache.UpdateParametersRequest -): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_parameters(request) - - -def test_update_parameters_rest_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/instances/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - parameters=cloud_memcache.MemcacheParameters(id="id_value"), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_parameters(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/instances/*}:updateParameters" - % client.transport._host, - args[1], - ) - - -def test_update_parameters_rest_flattened_error(transport: str = "rest"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_parameters( - cloud_memcache.UpdateParametersRequest(), - name="name_value", - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - parameters=cloud_memcache.MemcacheParameters(id="id_value"), - ) - - -def test_update_parameters_rest_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.DeleteInstanceRequest, - dict, - ], -) -def test_delete_instance_rest(request_type): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_instance(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_delete_instance_rest_required_fields( - request_type=cloud_memcache.DeleteInstanceRequest, -): - transport_class = transports.CloudMemcacheRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_instance(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_instance_rest_unset_required_fields(): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_instance_rest_interceptors(null_interceptor): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.CloudMemcacheRestInterceptor(), - ) - client = CloudMemcacheClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudMemcacheRestInterceptor, "post_delete_instance" - ) as post, mock.patch.object( - transports.CloudMemcacheRestInterceptor, "pre_delete_instance" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_memcache.DeleteInstanceRequest.pb( - cloud_memcache.DeleteInstanceRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - 
req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - - request = cloud_memcache.DeleteInstanceRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.delete_instance( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_delete_instance_rest_bad_request( - transport: str = "rest", request_type=cloud_memcache.DeleteInstanceRequest -): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_instance(request) - - -def test_delete_instance_rest_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/instances/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, - args[1], - ) - - -def test_delete_instance_rest_flattened_error(transport: str = "rest"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_instance( - cloud_memcache.DeleteInstanceRequest(), - name="name_value", - ) - - -def test_delete_instance_rest_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.ApplyParametersRequest, - dict, - ], -) -def test_apply_parameters_rest(request_type): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.apply_parameters(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_apply_parameters_rest_required_fields( - request_type=cloud_memcache.ApplyParametersRequest, -): - transport_class = transports.CloudMemcacheRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).apply_parameters._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).apply_parameters._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.apply_parameters(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_apply_parameters_rest_unset_required_fields(): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.apply_parameters._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_apply_parameters_rest_interceptors(null_interceptor): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.CloudMemcacheRestInterceptor(), - ) - client = CloudMemcacheClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudMemcacheRestInterceptor, "post_apply_parameters" - ) as post, mock.patch.object( - transports.CloudMemcacheRestInterceptor, "pre_apply_parameters" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_memcache.ApplyParametersRequest.pb( - cloud_memcache.ApplyParametersRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": 
pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - - request = cloud_memcache.ApplyParametersRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.apply_parameters( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_apply_parameters_rest_bad_request( - transport: str = "rest", request_type=cloud_memcache.ApplyParametersRequest -): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.apply_parameters(request) - - -def test_apply_parameters_rest_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/instances/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - node_ids=["node_ids_value"], - apply_all=True, - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.apply_parameters(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/instances/*}:applyParameters" - % client.transport._host, - args[1], - ) - - -def test_apply_parameters_rest_flattened_error(transport: str = "rest"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.apply_parameters( - cloud_memcache.ApplyParametersRequest(), - name="name_value", - node_ids=["node_ids_value"], - apply_all=True, - ) - - -def test_apply_parameters_rest_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.RescheduleMaintenanceRequest, - dict, - ], -) -def test_reschedule_maintenance_rest(request_type): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"instance": "projects/sample1/locations/sample2/instances/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.reschedule_maintenance(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_reschedule_maintenance_rest_required_fields( - request_type=cloud_memcache.RescheduleMaintenanceRequest, -): - transport_class = transports.CloudMemcacheRestTransport - - request_init = {} - request_init["instance"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).reschedule_maintenance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["instance"] = "instance_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).reschedule_maintenance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "instance" in jsonified_request - assert jsonified_request["instance"] == "instance_value" - - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.reschedule_maintenance(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_reschedule_maintenance_rest_unset_required_fields(): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.reschedule_maintenance._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "instance", - "rescheduleType", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_reschedule_maintenance_rest_interceptors(null_interceptor): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.CloudMemcacheRestInterceptor(), - ) - client = CloudMemcacheClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudMemcacheRestInterceptor, "post_reschedule_maintenance" - ) as post, mock.patch.object( - transports.CloudMemcacheRestInterceptor, 
"pre_reschedule_maintenance" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_memcache.RescheduleMaintenanceRequest.pb( - cloud_memcache.RescheduleMaintenanceRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - - request = cloud_memcache.RescheduleMaintenanceRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.reschedule_maintenance( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_reschedule_maintenance_rest_bad_request( - transport: str = "rest", request_type=cloud_memcache.RescheduleMaintenanceRequest -): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"instance": "projects/sample1/locations/sample2/instances/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.reschedule_maintenance(request) - - -def test_reschedule_maintenance_rest_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "instance": "projects/sample1/locations/sample2/instances/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - instance="instance_value", - reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, - schedule_time=timestamp_pb2.Timestamp(seconds=751), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.reschedule_maintenance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{instance=projects/*/locations/*/instances/*}:rescheduleMaintenance" - % client.transport._host, - args[1], - ) - - -def test_reschedule_maintenance_rest_flattened_error(transport: str = "rest"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.reschedule_maintenance( - cloud_memcache.RescheduleMaintenanceRequest(), - instance="instance_value", - reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, - schedule_time=timestamp_pb2.Timestamp(seconds=751), - ) - - -def test_reschedule_maintenance_rest_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.CloudMemcacheGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.CloudMemcacheGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudMemcacheClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.CloudMemcacheGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CloudMemcacheClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CloudMemcacheClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. 
- transport = transports.CloudMemcacheGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudMemcacheClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.CloudMemcacheGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = CloudMemcacheClient(transport=transport) - assert client.transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.CloudMemcacheGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.CloudMemcacheGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudMemcacheGrpcTransport, - transports.CloudMemcacheGrpcAsyncIOTransport, - transports.CloudMemcacheRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = CloudMemcacheClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. 
- client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.CloudMemcacheGrpcTransport, - ) - - -def test_cloud_memcache_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.CloudMemcacheTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_cloud_memcache_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.memcache_v1.services.cloud_memcache.transports.CloudMemcacheTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.CloudMemcacheTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "list_instances", - "get_instance", - "create_instance", - "update_instance", - "update_parameters", - "delete_instance", - "apply_parameters", - "reschedule_maintenance", - "get_location", - "list_locations", - "get_operation", - "cancel_operation", - "delete_operation", - "list_operations", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_cloud_memcache_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", 
autospec=True - ) as load_creds, mock.patch( - "google.cloud.memcache_v1.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudMemcacheTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -def test_cloud_memcache_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.memcache_v1.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudMemcacheTransport() - adc.assert_called_once() - - -def test_cloud_memcache_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - CloudMemcacheClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudMemcacheGrpcTransport, - transports.CloudMemcacheGrpcAsyncIOTransport, - ], -) -def test_cloud_memcache_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudMemcacheGrpcTransport, - transports.CloudMemcacheGrpcAsyncIOTransport, - transports.CloudMemcacheRestTransport, - ], -) -def test_cloud_memcache_transport_auth_gdch_credentials(transport_class): - host = "https://language.com" - api_audience_tests = [None, "https://language2.com"] - api_audience_expect = [host, "https://language2.com"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.CloudMemcacheGrpcTransport, grpc_helpers), - (transports.CloudMemcacheGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -def test_cloud_memcache_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "memcache.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=["1", "2"], - default_host="memcache.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudMemcacheGrpcTransport, - transports.CloudMemcacheGrpcAsyncIOTransport, - ], -) -def test_cloud_memcache_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) - - -def test_cloud_memcache_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ) as mock_configure_mtls_channel: - transports.CloudMemcacheRestTransport( - credentials=cred, client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -def test_cloud_memcache_rest_lro_client(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. 
- assert transport.operations_client is transport.operations_client - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_cloud_memcache_host_no_port(transport_name): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="memcache.googleapis.com" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "memcache.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://memcache.googleapis.com" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_cloud_memcache_host_with_port(transport_name): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="memcache.googleapis.com:8000" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "memcache.googleapis.com:8000" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://memcache.googleapis.com:8000" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "rest", - ], -) -def test_cloud_memcache_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = CloudMemcacheClient( - credentials=creds1, - transport=transport_name, - ) - client2 = CloudMemcacheClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.list_instances._session - session2 = client2.transport.list_instances._session - assert session1 != session2 - session1 = client1.transport.get_instance._session - session2 = client2.transport.get_instance._session - assert session1 != session2 - session1 = client1.transport.create_instance._session - session2 = client2.transport.create_instance._session - assert session1 != session2 - session1 = 
client1.transport.update_instance._session - session2 = client2.transport.update_instance._session - assert session1 != session2 - session1 = client1.transport.update_parameters._session - session2 = client2.transport.update_parameters._session - assert session1 != session2 - session1 = client1.transport.delete_instance._session - session2 = client2.transport.delete_instance._session - assert session1 != session2 - session1 = client1.transport.apply_parameters._session - session2 = client2.transport.apply_parameters._session - assert session1 != session2 - session1 = client1.transport.reschedule_maintenance._session - session2 = client2.transport.reschedule_maintenance._session - assert session1 != session2 - - -def test_cloud_memcache_grpc_transport_channel(): - channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.CloudMemcacheGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_cloud_memcache_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.CloudMemcacheGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudMemcacheGrpcTransport, - transports.CloudMemcacheGrpcAsyncIOTransport, - ], -) -def test_cloud_memcache_transport_channel_mtls_with_client_cert_source(transport_class): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudMemcacheGrpcTransport, - transports.CloudMemcacheGrpcAsyncIOTransport, - ], -) -def test_cloud_memcache_transport_channel_mtls_with_adc(transport_class): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_cloud_memcache_grpc_lro_client(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_cloud_memcache_grpc_lro_async_client(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - transport = client.transport - - # Ensure that we have a api-core operations client. 
- assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_instance_path(): - project = "squid" - location = "clam" - instance = "whelk" - expected = "projects/{project}/locations/{location}/instances/{instance}".format( - project=project, - location=location, - instance=instance, - ) - actual = CloudMemcacheClient.instance_path(project, location, instance) - assert expected == actual - - -def test_parse_instance_path(): - expected = { - "project": "octopus", - "location": "oyster", - "instance": "nudibranch", - } - path = CloudMemcacheClient.instance_path(**expected) - - # Check that the path construction is reversible. - actual = CloudMemcacheClient.parse_instance_path(path) - assert expected == actual - - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = CloudMemcacheClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = CloudMemcacheClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = CloudMemcacheClient.parse_common_billing_account_path(path) - assert expected == actual - - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = CloudMemcacheClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = CloudMemcacheClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = CloudMemcacheClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = CloudMemcacheClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = CloudMemcacheClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = CloudMemcacheClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format( - project=project, - ) - actual = CloudMemcacheClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = CloudMemcacheClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = CloudMemcacheClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = CloudMemcacheClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = CloudMemcacheClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = CloudMemcacheClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.CloudMemcacheTransport, "_prep_wrapped_messages" - ) as prep: - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.CloudMemcacheTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = CloudMemcacheClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_location_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.GetLocationRequest -): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_location(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.GetLocationRequest, - dict, - ], -) -def test_get_location_rest(request_type): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.ListLocationsRequest -): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({"name": "projects/sample1"}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_locations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.ListLocationsRequest, - dict, - ], -) -def test_list_locations_rest(request_type): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_cancel_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.CancelOperationRequest -): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest -): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.DeleteOperationRequest, - dict, - ], -) -def test_delete_operation_rest(request_type): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.ListOperationsRequest -): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.ListOperationsRequest, - dict, - ], -) -def test_list_operations_rest(request_type): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -def test_delete_operation(transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc"): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_field_headers(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -def test_delete_operation_from_dict(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc"): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_cancel_operation_field_headers(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -def test_cancel_operation_from_dict(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_get_operation_field_headers(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -def test_get_operation_from_dict(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) - - -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc"): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -def test_list_operations_field_headers(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -def test_list_operations_from_dict(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_locations(transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc"): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_list_locations_field_headers(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -def test_list_locations_from_dict(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_location(transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_get_location_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() - - client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=ga_credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] - - -def test_get_location_from_dict(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = locations_pb2.Location() - - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - with mock.patch.object( - type(getattr(client.transport, close_name)), "close" - ) as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - "rest", - "grpc", - ] - for transport in transports: - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - - -@pytest.mark.parametrize( - "client_class,transport_class", - [ - (CloudMemcacheClient, transports.CloudMemcacheGrpcTransport), - (CloudMemcacheAsyncClient, transports.CloudMemcacheGrpcAsyncIOTransport), - ], -) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/tests/unit/gapic/memcache_v1beta2/__init__.py b/tests/unit/gapic/memcache_v1beta2/__init__.py deleted file mode 100644 index e8e1c38..0000000 --- a/tests/unit/gapic/memcache_v1beta2/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py deleted file mode 100644 index 27b0a13..0000000 --- a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ /dev/null @@ -1,7968 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os - -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -from collections.abc import Iterable -import json -import math - -from google.api_core import ( - future, - gapic_v1, - grpc_helpers, - grpc_helpers_async, - operation, - operations_v1, - path_template, -) -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import operation_async # type: ignore -import google.auth -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.location import locations_pb2 -from google.longrunning import operations_pb2 -from google.oauth2 import service_account -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import json_format -from google.protobuf import timestamp_pb2 
# type: ignore -from google.type import dayofweek_pb2 # type: ignore -from google.type import timeofday_pb2 # type: ignore -import grpc -from grpc.experimental import aio -from proto.marshal.rules import wrappers -from proto.marshal.rules.dates import DurationRule, TimestampRule -import pytest -from requests import PreparedRequest, Request, Response -from requests.sessions import Session - -from google.cloud.memcache_v1beta2.services.cloud_memcache import ( - CloudMemcacheAsyncClient, - CloudMemcacheClient, - pagers, - transports, -) -from google.cloud.memcache_v1beta2.types import cloud_memcache - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return ( - "foo.googleapis.com" - if ("localhost" in client.DEFAULT_ENDPOINT) - else client.DEFAULT_ENDPOINT - ) - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert CloudMemcacheClient._get_default_mtls_endpoint(None) is None - assert ( - CloudMemcacheClient._get_default_mtls_endpoint(api_endpoint) - == api_mtls_endpoint - ) - assert ( - CloudMemcacheClient._get_default_mtls_endpoint(api_mtls_endpoint) - == api_mtls_endpoint - ) - assert ( - CloudMemcacheClient._get_default_mtls_endpoint(sandbox_endpoint) - == sandbox_mtls_endpoint - ) - assert ( - CloudMemcacheClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) - == sandbox_mtls_endpoint - ) - assert ( - CloudMemcacheClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - ) - - -@pytest.mark.parametrize( - 
"client_class,transport_name", - [ - (CloudMemcacheClient, "grpc"), - (CloudMemcacheAsyncClient, "grpc_asyncio"), - (CloudMemcacheClient, "rest"), - ], -) -def test_cloud_memcache_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object( - service_account.Credentials, "from_service_account_info" - ) as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - "memcache.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://memcache.googleapis.com" - ) - - -@pytest.mark.parametrize( - "transport_class,transport_name", - [ - (transports.CloudMemcacheGrpcTransport, "grpc"), - (transports.CloudMemcacheGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.CloudMemcacheRestTransport, "rest"), - ], -) -def test_cloud_memcache_client_service_account_always_use_jwt( - transport_class, transport_name -): - with mock.patch.object( - service_account.Credentials, "with_always_use_jwt_access", create=True - ) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object( - service_account.Credentials, "with_always_use_jwt_access", create=True - ) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize( - "client_class,transport_name", - [ - (CloudMemcacheClient, "grpc"), - (CloudMemcacheAsyncClient, "grpc_asyncio"), - (CloudMemcacheClient, "rest"), - ], -) -def test_cloud_memcache_client_from_service_account_file(client_class, transport_name): - creds = 
ga_credentials.AnonymousCredentials() - with mock.patch.object( - service_account.Credentials, "from_service_account_file" - ) as factory: - factory.return_value = creds - client = client_class.from_service_account_file( - "dummy/file/path.json", transport=transport_name - ) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json( - "dummy/file/path.json", transport=transport_name - ) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - "memcache.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://memcache.googleapis.com" - ) - - -def test_cloud_memcache_client_get_transport_class(): - transport = CloudMemcacheClient.get_transport_class() - available_transports = [ - transports.CloudMemcacheGrpcTransport, - transports.CloudMemcacheRestTransport, - ] - assert transport in available_transports - - transport = CloudMemcacheClient.get_transport_class("grpc") - assert transport == transports.CloudMemcacheGrpcTransport - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (CloudMemcacheClient, transports.CloudMemcacheGrpcTransport, "grpc"), - ( - CloudMemcacheAsyncClient, - transports.CloudMemcacheGrpcAsyncIOTransport, - "grpc_asyncio", - ), - (CloudMemcacheClient, transports.CloudMemcacheRestTransport, "rest"), - ], -) -@mock.patch.object( - CloudMemcacheClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(CloudMemcacheClient), -) -@mock.patch.object( - CloudMemcacheAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(CloudMemcacheAsyncClient), -) -def test_cloud_memcache_client_client_options( - client_class, transport_class, transport_name -): - # Check that if channel is provided we won't create a new one. 
- with mock.patch.object(CloudMemcacheClient, "get_transport_class") as gtc: - transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(CloudMemcacheClient, "get_transport_class") as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} - ): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions( - api_audience="https://language.googleapis.com" - ) - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - 
patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com", - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,use_client_cert_env", - [ - (CloudMemcacheClient, transports.CloudMemcacheGrpcTransport, "grpc", "true"), - ( - CloudMemcacheAsyncClient, - transports.CloudMemcacheGrpcAsyncIOTransport, - "grpc_asyncio", - "true", - ), - (CloudMemcacheClient, transports.CloudMemcacheGrpcTransport, "grpc", "false"), - ( - CloudMemcacheAsyncClient, - transports.CloudMemcacheGrpcAsyncIOTransport, - "grpc_asyncio", - "false", - ), - (CloudMemcacheClient, transports.CloudMemcacheRestTransport, "rest", "true"), - (CloudMemcacheClient, transports.CloudMemcacheRestTransport, "rest", "false"), - ], -) -@mock.patch.object( - CloudMemcacheClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(CloudMemcacheClient), -) -@mock.patch.object( - CloudMemcacheAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(CloudMemcacheAsyncClient), -) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_cloud_memcache_client_mtls_env_auto( - client_class, transport_class, transport_name, use_client_cert_env -): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - options = client_options.ClientOptions( - client_cert_source=client_cert_source_callback - ) - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=True, - ): - with mock.patch( - "google.auth.transport.mtls.default_client_cert_source", - return_value=client_cert_source_callback, - ): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=False, - ): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient] -) -@mock.patch.object( - CloudMemcacheClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(CloudMemcacheClient), -) -@mock.patch.object( - CloudMemcacheAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(CloudMemcacheAsyncClient), -) -def test_cloud_memcache_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint - ) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( - options - ) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint - ) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( - options - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=False, - ): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=True, - ): - with mock.patch( - "google.auth.transport.mtls.default_client_cert_source", - return_value=mock_client_cert_source, - ): - ( - api_endpoint, - cert_source, - ) = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (CloudMemcacheClient, transports.CloudMemcacheGrpcTransport, "grpc"), - ( - CloudMemcacheAsyncClient, - transports.CloudMemcacheGrpcAsyncIOTransport, - "grpc_asyncio", - ), - (CloudMemcacheClient, transports.CloudMemcacheRestTransport, "rest"), - ], -) -def test_cloud_memcache_client_client_options_scopes( - client_class, transport_class, transport_name -): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - ( - CloudMemcacheClient, - transports.CloudMemcacheGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - CloudMemcacheAsyncClient, - transports.CloudMemcacheGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - (CloudMemcacheClient, transports.CloudMemcacheRestTransport, "rest", None), - ], -) -def 
test_cloud_memcache_client_client_options_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. - options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -def test_cloud_memcache_client_client_options_from_dict(): - with mock.patch( - "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = CloudMemcacheClient( - client_options={"api_endpoint": "squid.clam.whelk"} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - ( - CloudMemcacheClient, - transports.CloudMemcacheGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - CloudMemcacheAsyncClient, - transports.CloudMemcacheGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - ], -) -def test_cloud_memcache_client_create_channel_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "memcache.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=None, - default_host="memcache.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.ListInstancesRequest, - dict, - ], -) -def test_list_instances(request_type, transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = cloud_memcache.ListInstancesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - response = client.list_instances(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ListInstancesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] - - -def test_list_instances_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: - client.list_instances() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ListInstancesRequest() - - -@pytest.mark.asyncio -async def test_list_instances_async( - transport: str = "grpc_asyncio", request_type=cloud_memcache.ListInstancesRequest -): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_instances), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_memcache.ListInstancesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - ) - response = await client.list_instances(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ListInstancesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstancesAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] - - -@pytest.mark.asyncio -async def test_list_instances_async_from_dict(): - await test_list_instances_async(request_type=dict) - - -def test_list_instances_field_headers(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.ListInstancesRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: - call.return_value = cloud_memcache.ListInstancesResponse() - client.list_instances(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_instances_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.ListInstancesRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_memcache.ListInstancesResponse() - ) - await client.list_instances(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -def test_list_instances_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = cloud_memcache.ListInstancesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_instances( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -def test_list_instances_flattened_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_instances( - cloud_memcache.ListInstancesRequest(), - parent="parent_value", - ) - - -@pytest.mark.asyncio -async def test_list_instances_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = cloud_memcache.ListInstancesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_memcache.ListInstancesResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_instances( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_list_instances_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_instances( - cloud_memcache.ListInstancesRequest(), - parent="parent_value", - ) - - -def test_list_instances_pager(transport_name: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - cloud_memcache.ListInstancesResponse( - resources=[ - cloud_memcache.Instance(), - cloud_memcache.Instance(), - cloud_memcache.Instance(), - ], - next_page_token="abc", - ), - cloud_memcache.ListInstancesResponse( - resources=[], - next_page_token="def", - ), - cloud_memcache.ListInstancesResponse( - resources=[ - cloud_memcache.Instance(), - ], - next_page_token="ghi", - ), - cloud_memcache.ListInstancesResponse( - resources=[ - cloud_memcache.Instance(), - cloud_memcache.Instance(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_instances(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloud_memcache.Instance) for i in results) - - -def test_list_instances_pages(transport_name: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_instances), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloud_memcache.ListInstancesResponse( - resources=[ - cloud_memcache.Instance(), - cloud_memcache.Instance(), - cloud_memcache.Instance(), - ], - next_page_token="abc", - ), - cloud_memcache.ListInstancesResponse( - resources=[], - next_page_token="def", - ), - cloud_memcache.ListInstancesResponse( - resources=[ - cloud_memcache.Instance(), - ], - next_page_token="ghi", - ), - cloud_memcache.ListInstancesResponse( - resources=[ - cloud_memcache.Instance(), - cloud_memcache.Instance(), - ], - ), - RuntimeError, - ) - pages = list(client.list_instances(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_instances_async_pager(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloud_memcache.ListInstancesResponse( - resources=[ - cloud_memcache.Instance(), - cloud_memcache.Instance(), - cloud_memcache.Instance(), - ], - next_page_token="abc", - ), - cloud_memcache.ListInstancesResponse( - resources=[], - next_page_token="def", - ), - cloud_memcache.ListInstancesResponse( - resources=[ - cloud_memcache.Instance(), - ], - next_page_token="ghi", - ), - cloud_memcache.ListInstancesResponse( - resources=[ - cloud_memcache.Instance(), - cloud_memcache.Instance(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_instances( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, cloud_memcache.Instance) for i in responses) - - -@pytest.mark.asyncio -async def test_list_instances_async_pages(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloud_memcache.ListInstancesResponse( - resources=[ - cloud_memcache.Instance(), - cloud_memcache.Instance(), - cloud_memcache.Instance(), - ], - next_page_token="abc", - ), - cloud_memcache.ListInstancesResponse( - resources=[], - next_page_token="def", - ), - cloud_memcache.ListInstancesResponse( - resources=[ - cloud_memcache.Instance(), - ], - next_page_token="ghi", - ), - cloud_memcache.ListInstancesResponse( - resources=[ - cloud_memcache.Instance(), - cloud_memcache.Instance(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in ( - await client.list_instances(request={}) - ).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.GetInstanceRequest, - dict, - ], -) -def test_get_instance(request_type, transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_instance), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = cloud_memcache.Instance( - name="name_value", - display_name="display_name_value", - authorized_network="authorized_network_value", - zones=["zones_value"], - node_count=1070, - memcache_version=cloud_memcache.MemcacheVersion.MEMCACHE_1_5, - state=cloud_memcache.Instance.State.CREATING, - memcache_full_version="memcache_full_version_value", - discovery_endpoint="discovery_endpoint_value", - update_available=True, - ) - response = client.get_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.GetInstanceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, cloud_memcache.Instance) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.authorized_network == "authorized_network_value" - assert response.zones == ["zones_value"] - assert response.node_count == 1070 - assert response.memcache_version == cloud_memcache.MemcacheVersion.MEMCACHE_1_5 - assert response.state == cloud_memcache.Instance.State.CREATING - assert response.memcache_full_version == "memcache_full_version_value" - assert response.discovery_endpoint == "discovery_endpoint_value" - assert response.update_available is True - - -def test_get_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_instance), "__call__") as call: - client.get_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.GetInstanceRequest() - - -@pytest.mark.asyncio -async def test_get_instance_async( - transport: str = "grpc_asyncio", request_type=cloud_memcache.GetInstanceRequest -): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_memcache.Instance( - name="name_value", - display_name="display_name_value", - authorized_network="authorized_network_value", - zones=["zones_value"], - node_count=1070, - memcache_version=cloud_memcache.MemcacheVersion.MEMCACHE_1_5, - state=cloud_memcache.Instance.State.CREATING, - memcache_full_version="memcache_full_version_value", - discovery_endpoint="discovery_endpoint_value", - update_available=True, - ) - ) - response = await client.get_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.GetInstanceRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_memcache.Instance) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.authorized_network == "authorized_network_value" - assert response.zones == ["zones_value"] - assert response.node_count == 1070 - assert response.memcache_version == cloud_memcache.MemcacheVersion.MEMCACHE_1_5 - assert response.state == cloud_memcache.Instance.State.CREATING - assert response.memcache_full_version == "memcache_full_version_value" - assert response.discovery_endpoint == "discovery_endpoint_value" - assert response.update_available is True - - -@pytest.mark.asyncio -async def test_get_instance_async_from_dict(): - await test_get_instance_async(request_type=dict) - - -def test_get_instance_field_headers(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.GetInstanceRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_instance), "__call__") as call: - call.return_value = cloud_memcache.Instance() - client.get_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_instance_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = cloud_memcache.GetInstanceRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_instance), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_memcache.Instance() - ) - await client.get_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_get_instance_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = cloud_memcache.Instance() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_instance( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_get_instance_flattened_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_instance( - cloud_memcache.GetInstanceRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_get_instance_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = cloud_memcache.Instance() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloud_memcache.Instance() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_instance( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_get_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_instance( - cloud_memcache.GetInstanceRequest(), - name="name_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.CreateInstanceRequest, - dict, - ], -) -def test_create_instance(request_type, transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.CreateInstanceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_instance), "__call__") as call: - client.create_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.CreateInstanceRequest() - - -@pytest.mark.asyncio -async def test_create_instance_async( - transport: str = "grpc_asyncio", request_type=cloud_memcache.CreateInstanceRequest -): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_instance), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.create_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.CreateInstanceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_instance_async_from_dict(): - await test_create_instance_async(request_type=dict) - - -def test_create_instance_field_headers(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.CreateInstanceRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_instance), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_create_instance_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.CreateInstanceRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_instance), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.create_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -def test_create_instance_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_instance( - parent="parent_value", - instance_id="instance_id_value", - resource=cloud_memcache.Instance(name="name_value"), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].instance_id - mock_val = "instance_id_value" - assert arg == mock_val - arg = args[0].resource - mock_val = cloud_memcache.Instance(name="name_value") - assert arg == mock_val - - -def test_create_instance_flattened_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_instance( - cloud_memcache.CreateInstanceRequest(), - parent="parent_value", - instance_id="instance_id_value", - resource=cloud_memcache.Instance(name="name_value"), - ) - - -@pytest.mark.asyncio -async def test_create_instance_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_instance( - parent="parent_value", - instance_id="instance_id_value", - resource=cloud_memcache.Instance(name="name_value"), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].instance_id - mock_val = "instance_id_value" - assert arg == mock_val - arg = args[0].resource - mock_val = cloud_memcache.Instance(name="name_value") - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_instance( - cloud_memcache.CreateInstanceRequest(), - parent="parent_value", - instance_id="instance_id_value", - resource=cloud_memcache.Instance(name="name_value"), - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.UpdateInstanceRequest, - dict, - ], -) -def test_update_instance(request_type, transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateInstanceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_instance), "__call__") as call: - client.update_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateInstanceRequest() - - -@pytest.mark.asyncio -async def test_update_instance_async( - transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateInstanceRequest -): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateInstanceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_instance_async_from_dict(): - await test_update_instance_async(request_type=dict) - - -def test_update_instance_field_headers(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.UpdateInstanceRequest() - - request.resource.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_instance), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "resource.name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_update_instance_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.UpdateInstanceRequest() - - request.resource.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_instance), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "resource.name=name_value", - ) in kw["metadata"] - - -def test_update_instance_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_instance), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_instance( - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - resource=cloud_memcache.Instance(name="name_value"), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - arg = args[0].resource - mock_val = cloud_memcache.Instance(name="name_value") - assert arg == mock_val - - -def test_update_instance_flattened_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_instance( - cloud_memcache.UpdateInstanceRequest(), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - resource=cloud_memcache.Instance(name="name_value"), - ) - - -@pytest.mark.asyncio -async def test_update_instance_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.update_instance( - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - resource=cloud_memcache.Instance(name="name_value"), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - arg = args[0].resource - mock_val = cloud_memcache.Instance(name="name_value") - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_instance( - cloud_memcache.UpdateInstanceRequest(), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - resource=cloud_memcache.Instance(name="name_value"), - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.UpdateParametersRequest, - dict, - ], -) -def test_update_parameters(request_type, transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_parameters), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_parameters(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateParametersRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_parameters_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_parameters), "__call__" - ) as call: - client.update_parameters() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateParametersRequest() - - -@pytest.mark.asyncio -async def test_update_parameters_async( - transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateParametersRequest -): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_parameters), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.update_parameters(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.UpdateParametersRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_parameters_async_from_dict(): - await test_update_parameters_async(request_type=dict) - - -def test_update_parameters_field_headers(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.UpdateParametersRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_parameters), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_parameters(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_update_parameters_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.UpdateParametersRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_parameters), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.update_parameters(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_update_parameters_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_parameters), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_parameters( - name="name_value", - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - parameters=cloud_memcache.MemcacheParameters(id="id_value"), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - arg = args[0].parameters - mock_val = cloud_memcache.MemcacheParameters(id="id_value") - assert arg == mock_val - - -def test_update_parameters_flattened_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_parameters( - cloud_memcache.UpdateParametersRequest(), - name="name_value", - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - parameters=cloud_memcache.MemcacheParameters(id="id_value"), - ) - - -@pytest.mark.asyncio -async def test_update_parameters_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_parameters), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_parameters( - name="name_value", - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - parameters=cloud_memcache.MemcacheParameters(id="id_value"), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - arg = args[0].parameters - mock_val = cloud_memcache.MemcacheParameters(id="id_value") - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_parameters_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_parameters( - cloud_memcache.UpdateParametersRequest(), - name="name_value", - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - parameters=cloud_memcache.MemcacheParameters(id="id_value"), - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.DeleteInstanceRequest, - dict, - ], -) -def test_delete_instance(request_type, transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.DeleteInstanceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: - client.delete_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.DeleteInstanceRequest() - - -@pytest.mark.asyncio -async def test_delete_instance_async( - transport: str = "grpc_asyncio", request_type=cloud_memcache.DeleteInstanceRequest -): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.delete_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.DeleteInstanceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_instance_async_from_dict(): - await test_delete_instance_async(request_type=dict) - - -def test_delete_instance_field_headers(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.DeleteInstanceRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_delete_instance_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.DeleteInstanceRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.delete_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_delete_instance_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_instance( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_delete_instance_flattened_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_instance( - cloud_memcache.DeleteInstanceRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_delete_instance_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_instance( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_delete_instance_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_instance( - cloud_memcache.DeleteInstanceRequest(), - name="name_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.ApplyParametersRequest, - dict, - ], -) -def test_apply_parameters(request_type, transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.apply_parameters(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ApplyParametersRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_apply_parameters_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: - client.apply_parameters() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ApplyParametersRequest() - - -@pytest.mark.asyncio -async def test_apply_parameters_async( - transport: str = "grpc_asyncio", request_type=cloud_memcache.ApplyParametersRequest -): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.apply_parameters(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ApplyParametersRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_apply_parameters_async_from_dict(): - await test_apply_parameters_async(request_type=dict) - - -def test_apply_parameters_field_headers(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = cloud_memcache.ApplyParametersRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.apply_parameters(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_apply_parameters_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.ApplyParametersRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.apply_parameters(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_apply_parameters_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.apply_parameters( - name="name_value", - node_ids=["node_ids_value"], - apply_all=True, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - arg = args[0].node_ids - mock_val = ["node_ids_value"] - assert arg == mock_val - arg = args[0].apply_all - mock_val = True - assert arg == mock_val - - -def test_apply_parameters_flattened_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.apply_parameters( - cloud_memcache.ApplyParametersRequest(), - name="name_value", - node_ids=["node_ids_value"], - apply_all=True, - ) - - -@pytest.mark.asyncio -async def test_apply_parameters_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.apply_parameters( - name="name_value", - node_ids=["node_ids_value"], - apply_all=True, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - arg = args[0].node_ids - mock_val = ["node_ids_value"] - assert arg == mock_val - arg = args[0].apply_all - mock_val = True - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_apply_parameters_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.apply_parameters( - cloud_memcache.ApplyParametersRequest(), - name="name_value", - node_ids=["node_ids_value"], - apply_all=True, - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.ApplySoftwareUpdateRequest, - dict, - ], -) -def test_apply_software_update(request_type, transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.apply_software_update), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.apply_software_update(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ApplySoftwareUpdateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_apply_software_update_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.apply_software_update), "__call__" - ) as call: - client.apply_software_update() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ApplySoftwareUpdateRequest() - - -@pytest.mark.asyncio -async def test_apply_software_update_async( - transport: str = "grpc_asyncio", - request_type=cloud_memcache.ApplySoftwareUpdateRequest, -): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.apply_software_update), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.apply_software_update(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.ApplySoftwareUpdateRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_apply_software_update_async_from_dict(): - await test_apply_software_update_async(request_type=dict) - - -def test_apply_software_update_field_headers(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.ApplySoftwareUpdateRequest() - - request.instance = "instance_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.apply_software_update), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.apply_software_update(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "instance=instance_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_apply_software_update_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.ApplySoftwareUpdateRequest() - - request.instance = "instance_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.apply_software_update), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.apply_software_update(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "instance=instance_value", - ) in kw["metadata"] - - -def test_apply_software_update_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.apply_software_update), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.apply_software_update( - instance="instance_value", - node_ids=["node_ids_value"], - apply_all=True, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].instance - mock_val = "instance_value" - assert arg == mock_val - arg = args[0].node_ids - mock_val = ["node_ids_value"] - assert arg == mock_val - arg = args[0].apply_all - mock_val = True - assert arg == mock_val - - -def test_apply_software_update_flattened_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.apply_software_update( - cloud_memcache.ApplySoftwareUpdateRequest(), - instance="instance_value", - node_ids=["node_ids_value"], - apply_all=True, - ) - - -@pytest.mark.asyncio -async def test_apply_software_update_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.apply_software_update), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.apply_software_update( - instance="instance_value", - node_ids=["node_ids_value"], - apply_all=True, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].instance - mock_val = "instance_value" - assert arg == mock_val - arg = args[0].node_ids - mock_val = ["node_ids_value"] - assert arg == mock_val - arg = args[0].apply_all - mock_val = True - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_apply_software_update_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.apply_software_update( - cloud_memcache.ApplySoftwareUpdateRequest(), - instance="instance_value", - node_ids=["node_ids_value"], - apply_all=True, - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.RescheduleMaintenanceRequest, - dict, - ], -) -def test_reschedule_maintenance(request_type, transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reschedule_maintenance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.reschedule_maintenance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.RescheduleMaintenanceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_reschedule_maintenance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.reschedule_maintenance), "__call__" - ) as call: - client.reschedule_maintenance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.RescheduleMaintenanceRequest() - - -@pytest.mark.asyncio -async def test_reschedule_maintenance_async( - transport: str = "grpc_asyncio", - request_type=cloud_memcache.RescheduleMaintenanceRequest, -): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reschedule_maintenance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.reschedule_maintenance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_memcache.RescheduleMaintenanceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_reschedule_maintenance_async_from_dict(): - await test_reschedule_maintenance_async(request_type=dict) - - -def test_reschedule_maintenance_field_headers(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.RescheduleMaintenanceRequest() - - request.instance = "instance_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.reschedule_maintenance), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.reschedule_maintenance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "instance=instance_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_reschedule_maintenance_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloud_memcache.RescheduleMaintenanceRequest() - - request.instance = "instance_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reschedule_maintenance), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.reschedule_maintenance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "instance=instance_value", - ) in kw["metadata"] - - -def test_reschedule_maintenance_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reschedule_maintenance), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.reschedule_maintenance( - instance="instance_value", - reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, - schedule_time=timestamp_pb2.Timestamp(seconds=751), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].instance - mock_val = "instance_value" - assert arg == mock_val - arg = args[0].reschedule_type - mock_val = cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE - assert arg == mock_val - assert TimestampRule().to_proto( - args[0].schedule_time - ) == timestamp_pb2.Timestamp(seconds=751) - - -def test_reschedule_maintenance_flattened_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.reschedule_maintenance( - cloud_memcache.RescheduleMaintenanceRequest(), - instance="instance_value", - reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, - schedule_time=timestamp_pb2.Timestamp(seconds=751), - ) - - -@pytest.mark.asyncio -async def test_reschedule_maintenance_flattened_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reschedule_maintenance), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.reschedule_maintenance( - instance="instance_value", - reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, - schedule_time=timestamp_pb2.Timestamp(seconds=751), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].instance - mock_val = "instance_value" - assert arg == mock_val - arg = args[0].reschedule_type - mock_val = cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE - assert arg == mock_val - assert TimestampRule().to_proto( - args[0].schedule_time - ) == timestamp_pb2.Timestamp(seconds=751) - - -@pytest.mark.asyncio -async def test_reschedule_maintenance_flattened_error_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.reschedule_maintenance( - cloud_memcache.RescheduleMaintenanceRequest(), - instance="instance_value", - reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, - schedule_time=timestamp_pb2.Timestamp(seconds=751), - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.ListInstancesRequest, - dict, - ], -) -def test_list_instances_rest(request_type): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = cloud_memcache.ListInstancesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloud_memcache.ListInstancesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_instances(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] - - -def test_list_instances_rest_required_fields( - request_type=cloud_memcache.ListInstancesRequest, -): - transport_class = transports.CloudMemcacheRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_instances._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_instances._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = cloud_memcache.ListInstancesResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = cloud_memcache.ListInstancesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_instances(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_instances_rest_unset_required_fields(): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_instances._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_instances_rest_interceptors(null_interceptor): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.CloudMemcacheRestInterceptor(), - ) - client = CloudMemcacheClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as 
transcode, mock.patch.object( - transports.CloudMemcacheRestInterceptor, "post_list_instances" - ) as post, mock.patch.object( - transports.CloudMemcacheRestInterceptor, "pre_list_instances" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_memcache.ListInstancesRequest.pb( - cloud_memcache.ListInstancesRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloud_memcache.ListInstancesResponse.to_json( - cloud_memcache.ListInstancesResponse() - ) - - request = cloud_memcache.ListInstancesRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloud_memcache.ListInstancesResponse() - - client.list_instances( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_instances_rest_bad_request( - transport: str = "rest", request_type=cloud_memcache.ListInstancesRequest -): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_instances(request) - - -def test_list_instances_rest_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = cloud_memcache.ListInstancesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloud_memcache.ListInstancesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_instances(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1beta2/{parent=projects/*/locations/*}/instances" - % client.transport._host, - args[1], - ) - - -def test_list_instances_rest_flattened_error(transport: str = "rest"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_instances( - cloud_memcache.ListInstancesRequest(), - parent="parent_value", - ) - - -def test_list_instances_rest_pager(transport: str = "rest"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - cloud_memcache.ListInstancesResponse( - resources=[ - cloud_memcache.Instance(), - cloud_memcache.Instance(), - cloud_memcache.Instance(), - ], - next_page_token="abc", - ), - cloud_memcache.ListInstancesResponse( - resources=[], - next_page_token="def", - ), - cloud_memcache.ListInstancesResponse( - resources=[ - cloud_memcache.Instance(), - ], - next_page_token="ghi", - ), - cloud_memcache.ListInstancesResponse( - resources=[ - cloud_memcache.Instance(), - cloud_memcache.Instance(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - cloud_memcache.ListInstancesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_instances(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, cloud_memcache.Instance) for i in results) - - pages = list(client.list_instances(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token 
== token - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.GetInstanceRequest, - dict, - ], -) -def test_get_instance_rest(request_type): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = cloud_memcache.Instance( - name="name_value", - display_name="display_name_value", - authorized_network="authorized_network_value", - zones=["zones_value"], - node_count=1070, - memcache_version=cloud_memcache.MemcacheVersion.MEMCACHE_1_5, - state=cloud_memcache.Instance.State.CREATING, - memcache_full_version="memcache_full_version_value", - discovery_endpoint="discovery_endpoint_value", - update_available=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloud_memcache.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_instance(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_memcache.Instance) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.authorized_network == "authorized_network_value" - assert response.zones == ["zones_value"] - assert response.node_count == 1070 - assert response.memcache_version == cloud_memcache.MemcacheVersion.MEMCACHE_1_5 - assert response.state == cloud_memcache.Instance.State.CREATING - assert response.memcache_full_version == "memcache_full_version_value" - assert response.discovery_endpoint == "discovery_endpoint_value" - assert response.update_available is True - - -def test_get_instance_rest_required_fields( - request_type=cloud_memcache.GetInstanceRequest, -): - transport_class = transports.CloudMemcacheRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = cloud_memcache.Instance() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = cloud_memcache.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_instance(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_instance_rest_unset_required_fields(): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_instance_rest_interceptors(null_interceptor): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.CloudMemcacheRestInterceptor(), - ) - client = CloudMemcacheClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, 
"transcode" - ) as transcode, mock.patch.object( - transports.CloudMemcacheRestInterceptor, "post_get_instance" - ) as post, mock.patch.object( - transports.CloudMemcacheRestInterceptor, "pre_get_instance" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_memcache.GetInstanceRequest.pb( - cloud_memcache.GetInstanceRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloud_memcache.Instance.to_json( - cloud_memcache.Instance() - ) - - request = cloud_memcache.GetInstanceRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloud_memcache.Instance() - - client.get_instance( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_instance_rest_bad_request( - transport: str = "rest", request_type=cloud_memcache.GetInstanceRequest -): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_instance(request) - - -def test_get_instance_rest_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = cloud_memcache.Instance() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/instances/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloud_memcache.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1beta2/{name=projects/*/locations/*/instances/*}" - % client.transport._host, - args[1], - ) - - -def test_get_instance_rest_flattened_error(transport: str = "rest"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_instance( - cloud_memcache.GetInstanceRequest(), - name="name_value", - ) - - -def test_get_instance_rest_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.CreateInstanceRequest, - dict, - ], -) -def test_create_instance_rest(request_type): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["resource"] = { - "name": "name_value", - "display_name": "display_name_value", - "labels": {}, - "authorized_network": "authorized_network_value", - "zones": ["zones_value1", "zones_value2"], - "node_count": 1070, - "node_config": {"cpu_count": 976, "memory_size_mb": 1505}, - "memcache_version": 1, - "parameters": {"id": "id_value", "params": {}}, - "memcache_nodes": [ - { - "node_id": "node_id_value", - "zone": "zone_value", - "state": 1, - "host": "host_value", - "port": 453, - "parameters": {}, - "update_available": True, - } - ], - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "state": 1, - "memcache_full_version": "memcache_full_version_value", - "instance_messages": [{"code": 1, "message": "message_value"}], - "discovery_endpoint": "discovery_endpoint_value", - "update_available": True, - "maintenance_policy": { - "create_time": {}, - "update_time": {}, - "description": "description_value", - "weekly_maintenance_window": [ - { - "day": 1, - "start_time": { - "hours": 561, - "minutes": 773, - "seconds": 751, - "nanos": 543, - }, - "duration": {"seconds": 751, "nanos": 543}, - } - ], - }, - "maintenance_schedule": { - "start_time": {}, - "end_time": {}, - "schedule_deadline_time": {}, - }, - } - request = request_type(**request_init) - - # Mock the http request call within the method 
and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_instance(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - - -def test_create_instance_rest_required_fields( - request_type=cloud_memcache.CreateInstanceRequest, -): - transport_class = transports.CloudMemcacheRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["instance_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) - ) - - # verify fields with default values are dropped - assert "instanceId" not in jsonified_request - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "instanceId" in jsonified_request - assert jsonified_request["instanceId"] == request_init["instance_id"] - - jsonified_request["parent"] = "parent_value" - jsonified_request["instanceId"] = "instance_id_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_instance._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("instance_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "instanceId" in jsonified_request - assert jsonified_request["instanceId"] == "instance_id_value" - - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_instance(request) - - expected_params = [ - ( - "instanceId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_instance_rest_unset_required_fields(): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_instance._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("instanceId",)) - & set( - ( - "parent", - "instanceId", - "resource", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_instance_rest_interceptors(null_interceptor): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.CloudMemcacheRestInterceptor(), - ) - client = CloudMemcacheClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudMemcacheRestInterceptor, "post_create_instance" - ) as post, mock.patch.object( - transports.CloudMemcacheRestInterceptor, "pre_create_instance" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_memcache.CreateInstanceRequest.pb( - 
cloud_memcache.CreateInstanceRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - - request = cloud_memcache.CreateInstanceRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.create_instance( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_instance_rest_bad_request( - transport: str = "rest", request_type=cloud_memcache.CreateInstanceRequest -): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["resource"] = { - "name": "name_value", - "display_name": "display_name_value", - "labels": {}, - "authorized_network": "authorized_network_value", - "zones": ["zones_value1", "zones_value2"], - "node_count": 1070, - "node_config": {"cpu_count": 976, "memory_size_mb": 1505}, - "memcache_version": 1, - "parameters": {"id": "id_value", "params": {}}, - "memcache_nodes": [ - { - "node_id": "node_id_value", - "zone": "zone_value", - "state": 1, - "host": "host_value", - "port": 453, - "parameters": {}, - "update_available": True, - } - ], - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "state": 1, - "memcache_full_version": "memcache_full_version_value", - "instance_messages": [{"code": 1, "message": "message_value"}], - "discovery_endpoint": "discovery_endpoint_value", - "update_available": True, - "maintenance_policy": { - "create_time": {}, - 
"update_time": {}, - "description": "description_value", - "weekly_maintenance_window": [ - { - "day": 1, - "start_time": { - "hours": 561, - "minutes": 773, - "seconds": 751, - "nanos": 543, - }, - "duration": {"seconds": 751, "nanos": 543}, - } - ], - }, - "maintenance_schedule": { - "start_time": {}, - "end_time": {}, - "schedule_deadline_time": {}, - }, - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_instance(request) - - -def test_create_instance_rest_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - instance_id="instance_id_value", - resource=cloud_memcache.Instance(name="name_value"), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1beta2/{parent=projects/*/locations/*}/instances" - % client.transport._host, - args[1], - ) - - -def test_create_instance_rest_flattened_error(transport: str = "rest"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_instance( - cloud_memcache.CreateInstanceRequest(), - parent="parent_value", - instance_id="instance_id_value", - resource=cloud_memcache.Instance(name="name_value"), - ) - - -def test_create_instance_rest_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.UpdateInstanceRequest, - dict, - ], -) -def test_update_instance_rest(request_type): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "resource": {"name": "projects/sample1/locations/sample2/instances/sample3"} - } - request_init["resource"] = { - "name": "projects/sample1/locations/sample2/instances/sample3", - "display_name": "display_name_value", - "labels": {}, - "authorized_network": "authorized_network_value", - "zones": ["zones_value1", "zones_value2"], - "node_count": 1070, - "node_config": {"cpu_count": 976, "memory_size_mb": 1505}, - "memcache_version": 1, - "parameters": {"id": "id_value", "params": {}}, - "memcache_nodes": [ - { - "node_id": "node_id_value", - "zone": "zone_value", - "state": 1, - "host": "host_value", - "port": 453, - "parameters": {}, - "update_available": True, - } - ], - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "state": 1, - "memcache_full_version": 
"memcache_full_version_value", - "instance_messages": [{"code": 1, "message": "message_value"}], - "discovery_endpoint": "discovery_endpoint_value", - "update_available": True, - "maintenance_policy": { - "create_time": {}, - "update_time": {}, - "description": "description_value", - "weekly_maintenance_window": [ - { - "day": 1, - "start_time": { - "hours": 561, - "minutes": 773, - "seconds": 751, - "nanos": 543, - }, - "duration": {"seconds": 751, "nanos": 543}, - } - ], - }, - "maintenance_schedule": { - "start_time": {}, - "end_time": {}, - "schedule_deadline_time": {}, - }, - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_instance(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_update_instance_rest_required_fields( - request_type=cloud_memcache.UpdateInstanceRequest, -): - transport_class = transports.CloudMemcacheRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_instance._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.update_instance(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_instance_rest_unset_required_fields(): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_instance._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "updateMask", - "resource", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_instance_rest_interceptors(null_interceptor): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.CloudMemcacheRestInterceptor(), - ) - client = CloudMemcacheClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudMemcacheRestInterceptor, "post_update_instance" - ) as post, mock.patch.object( - transports.CloudMemcacheRestInterceptor, "pre_update_instance" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_memcache.UpdateInstanceRequest.pb( - cloud_memcache.UpdateInstanceRequest() - ) - transcode.return_value = { - 
"method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - - request = cloud_memcache.UpdateInstanceRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.update_instance( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_instance_rest_bad_request( - transport: str = "rest", request_type=cloud_memcache.UpdateInstanceRequest -): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "resource": {"name": "projects/sample1/locations/sample2/instances/sample3"} - } - request_init["resource"] = { - "name": "projects/sample1/locations/sample2/instances/sample3", - "display_name": "display_name_value", - "labels": {}, - "authorized_network": "authorized_network_value", - "zones": ["zones_value1", "zones_value2"], - "node_count": 1070, - "node_config": {"cpu_count": 976, "memory_size_mb": 1505}, - "memcache_version": 1, - "parameters": {"id": "id_value", "params": {}}, - "memcache_nodes": [ - { - "node_id": "node_id_value", - "zone": "zone_value", - "state": 1, - "host": "host_value", - "port": 453, - "parameters": {}, - "update_available": True, - } - ], - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "state": 1, - "memcache_full_version": "memcache_full_version_value", - "instance_messages": [{"code": 1, "message": "message_value"}], - "discovery_endpoint": "discovery_endpoint_value", - "update_available": True, - "maintenance_policy": { - "create_time": {}, - 
"update_time": {}, - "description": "description_value", - "weekly_maintenance_window": [ - { - "day": 1, - "start_time": { - "hours": 561, - "minutes": 773, - "seconds": 751, - "nanos": 543, - }, - "duration": {"seconds": 751, "nanos": 543}, - } - ], - }, - "maintenance_schedule": { - "start_time": {}, - "end_time": {}, - "schedule_deadline_time": {}, - }, - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_instance(request) - - -def test_update_instance_rest_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "resource": {"name": "projects/sample1/locations/sample2/instances/sample3"} - } - - # get truthy value for each flattened field - mock_args = dict( - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - resource=cloud_memcache.Instance(name="name_value"), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1beta2/{resource.name=projects/*/locations/*/instances/*}" - % client.transport._host, - args[1], - ) - - -def test_update_instance_rest_flattened_error(transport: str = "rest"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_instance( - cloud_memcache.UpdateInstanceRequest(), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - resource=cloud_memcache.Instance(name="name_value"), - ) - - -def test_update_instance_rest_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.UpdateParametersRequest, - dict, - ], -) -def test_update_parameters_rest(request_type): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_parameters(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_update_parameters_rest_required_fields( - request_type=cloud_memcache.UpdateParametersRequest, -): - transport_class = transports.CloudMemcacheRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_parameters._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_parameters._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.update_parameters(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_parameters_rest_unset_required_fields(): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_parameters._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "name", - "updateMask", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_parameters_rest_interceptors(null_interceptor): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.CloudMemcacheRestInterceptor(), - ) - client = CloudMemcacheClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudMemcacheRestInterceptor, "post_update_parameters" - ) as post, mock.patch.object( - transports.CloudMemcacheRestInterceptor, "pre_update_parameters" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_memcache.UpdateParametersRequest.pb( - cloud_memcache.UpdateParametersRequest() - ) - transcode.return_value = { - 
"method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - - request = cloud_memcache.UpdateParametersRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.update_parameters( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_parameters_rest_bad_request( - transport: str = "rest", request_type=cloud_memcache.UpdateParametersRequest -): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_parameters(request) - - -def test_update_parameters_rest_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/instances/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - parameters=cloud_memcache.MemcacheParameters(id="id_value"), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_parameters(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1beta2/{name=projects/*/locations/*/instances/*}:updateParameters" - % client.transport._host, - args[1], - ) - - -def test_update_parameters_rest_flattened_error(transport: str = "rest"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_parameters( - cloud_memcache.UpdateParametersRequest(), - name="name_value", - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - parameters=cloud_memcache.MemcacheParameters(id="id_value"), - ) - - -def test_update_parameters_rest_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.DeleteInstanceRequest, - dict, - ], -) -def test_delete_instance_rest(request_type): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_instance(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_delete_instance_rest_required_fields( - request_type=cloud_memcache.DeleteInstanceRequest, -): - transport_class = transports.CloudMemcacheRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_instance(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_instance_rest_unset_required_fields(): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_instance_rest_interceptors(null_interceptor): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.CloudMemcacheRestInterceptor(), - ) - client = CloudMemcacheClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudMemcacheRestInterceptor, "post_delete_instance" - ) as post, mock.patch.object( - transports.CloudMemcacheRestInterceptor, "pre_delete_instance" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_memcache.DeleteInstanceRequest.pb( - cloud_memcache.DeleteInstanceRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - 
req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - - request = cloud_memcache.DeleteInstanceRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.delete_instance( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_delete_instance_rest_bad_request( - transport: str = "rest", request_type=cloud_memcache.DeleteInstanceRequest -): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_instance(request) - - -def test_delete_instance_rest_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/instances/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1beta2/{name=projects/*/locations/*/instances/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_instance_rest_flattened_error(transport: str = "rest"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_instance( - cloud_memcache.DeleteInstanceRequest(), - name="name_value", - ) - - -def test_delete_instance_rest_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.ApplyParametersRequest, - dict, - ], -) -def test_apply_parameters_rest(request_type): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.apply_parameters(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_apply_parameters_rest_required_fields( - request_type=cloud_memcache.ApplyParametersRequest, -): - transport_class = transports.CloudMemcacheRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).apply_parameters._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).apply_parameters._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.apply_parameters(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_apply_parameters_rest_unset_required_fields(): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.apply_parameters._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_apply_parameters_rest_interceptors(null_interceptor): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.CloudMemcacheRestInterceptor(), - ) - client = CloudMemcacheClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudMemcacheRestInterceptor, "post_apply_parameters" - ) as post, mock.patch.object( - transports.CloudMemcacheRestInterceptor, "pre_apply_parameters" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_memcache.ApplyParametersRequest.pb( - cloud_memcache.ApplyParametersRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": 
pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - - request = cloud_memcache.ApplyParametersRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.apply_parameters( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_apply_parameters_rest_bad_request( - transport: str = "rest", request_type=cloud_memcache.ApplyParametersRequest -): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.apply_parameters(request) - - -def test_apply_parameters_rest_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/instances/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - node_ids=["node_ids_value"], - apply_all=True, - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.apply_parameters(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1beta2/{name=projects/*/locations/*/instances/*}:applyParameters" - % client.transport._host, - args[1], - ) - - -def test_apply_parameters_rest_flattened_error(transport: str = "rest"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.apply_parameters( - cloud_memcache.ApplyParametersRequest(), - name="name_value", - node_ids=["node_ids_value"], - apply_all=True, - ) - - -def test_apply_parameters_rest_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.ApplySoftwareUpdateRequest, - dict, - ], -) -def test_apply_software_update_rest(request_type): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"instance": "projects/sample1/locations/sample2/instances/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.apply_software_update(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_apply_software_update_rest_required_fields( - request_type=cloud_memcache.ApplySoftwareUpdateRequest, -): - transport_class = transports.CloudMemcacheRestTransport - - request_init = {} - request_init["instance"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).apply_software_update._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["instance"] = "instance_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).apply_software_update._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "instance" in jsonified_request - assert jsonified_request["instance"] == "instance_value" - - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.apply_software_update(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_apply_software_update_rest_unset_required_fields(): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.apply_software_update._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("instance",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_apply_software_update_rest_interceptors(null_interceptor): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.CloudMemcacheRestInterceptor(), - ) - client = CloudMemcacheClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudMemcacheRestInterceptor, "post_apply_software_update" - ) as post, mock.patch.object( - transports.CloudMemcacheRestInterceptor, "pre_apply_software_update" - ) as pre: - 
pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_memcache.ApplySoftwareUpdateRequest.pb( - cloud_memcache.ApplySoftwareUpdateRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - - request = cloud_memcache.ApplySoftwareUpdateRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.apply_software_update( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_apply_software_update_rest_bad_request( - transport: str = "rest", request_type=cloud_memcache.ApplySoftwareUpdateRequest -): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"instance": "projects/sample1/locations/sample2/instances/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.apply_software_update(request) - - -def test_apply_software_update_rest_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "instance": "projects/sample1/locations/sample2/instances/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - instance="instance_value", - node_ids=["node_ids_value"], - apply_all=True, - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.apply_software_update(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1beta2/{instance=projects/*/locations/*/instances/*}:applySoftwareUpdate" - % client.transport._host, - args[1], - ) - - -def test_apply_software_update_rest_flattened_error(transport: str = "rest"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.apply_software_update( - cloud_memcache.ApplySoftwareUpdateRequest(), - instance="instance_value", - node_ids=["node_ids_value"], - apply_all=True, - ) - - -def test_apply_software_update_rest_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - cloud_memcache.RescheduleMaintenanceRequest, - dict, - ], -) -def test_reschedule_maintenance_rest(request_type): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"instance": "projects/sample1/locations/sample2/instances/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.reschedule_maintenance(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_reschedule_maintenance_rest_required_fields( - request_type=cloud_memcache.RescheduleMaintenanceRequest, -): - transport_class = transports.CloudMemcacheRestTransport - - request_init = {} - request_init["instance"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).reschedule_maintenance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["instance"] = "instance_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).reschedule_maintenance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "instance" in jsonified_request - assert jsonified_request["instance"] == "instance_value" - - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.reschedule_maintenance(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_reschedule_maintenance_rest_unset_required_fields(): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.reschedule_maintenance._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "instance", - "rescheduleType", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_reschedule_maintenance_rest_interceptors(null_interceptor): - transport = transports.CloudMemcacheRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.CloudMemcacheRestInterceptor(), - ) - client = CloudMemcacheClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CloudMemcacheRestInterceptor, "post_reschedule_maintenance" - ) as post, mock.patch.object( - transports.CloudMemcacheRestInterceptor, 
"pre_reschedule_maintenance" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_memcache.RescheduleMaintenanceRequest.pb( - cloud_memcache.RescheduleMaintenanceRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - - request = cloud_memcache.RescheduleMaintenanceRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.reschedule_maintenance( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_reschedule_maintenance_rest_bad_request( - transport: str = "rest", request_type=cloud_memcache.RescheduleMaintenanceRequest -): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"instance": "projects/sample1/locations/sample2/instances/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.reschedule_maintenance(request) - - -def test_reschedule_maintenance_rest_flattened(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "instance": "projects/sample1/locations/sample2/instances/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - instance="instance_value", - reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, - schedule_time=timestamp_pb2.Timestamp(seconds=751), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.reschedule_maintenance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1beta2/{instance=projects/*/locations/*/instances/*}:rescheduleMaintenance" - % client.transport._host, - args[1], - ) - - -def test_reschedule_maintenance_rest_flattened_error(transport: str = "rest"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.reschedule_maintenance( - cloud_memcache.RescheduleMaintenanceRequest(), - instance="instance_value", - reschedule_type=cloud_memcache.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, - schedule_time=timestamp_pb2.Timestamp(seconds=751), - ) - - -def test_reschedule_maintenance_rest_error(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.CloudMemcacheGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.CloudMemcacheGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudMemcacheClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.CloudMemcacheGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CloudMemcacheClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CloudMemcacheClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. 
- transport = transports.CloudMemcacheGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudMemcacheClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.CloudMemcacheGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = CloudMemcacheClient(transport=transport) - assert client.transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.CloudMemcacheGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.CloudMemcacheGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudMemcacheGrpcTransport, - transports.CloudMemcacheGrpcAsyncIOTransport, - transports.CloudMemcacheRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = CloudMemcacheClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. 
- client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.CloudMemcacheGrpcTransport, - ) - - -def test_cloud_memcache_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.CloudMemcacheTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_cloud_memcache_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.CloudMemcacheTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "list_instances", - "get_instance", - "create_instance", - "update_instance", - "update_parameters", - "delete_instance", - "apply_parameters", - "apply_software_update", - "reschedule_maintenance", - "get_location", - "list_locations", - "get_operation", - "cancel_operation", - "delete_operation", - "list_operations", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_cloud_memcache_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, 
"load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudMemcacheTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -def test_cloud_memcache_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.memcache_v1beta2.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudMemcacheTransport() - adc.assert_called_once() - - -def test_cloud_memcache_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - CloudMemcacheClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudMemcacheGrpcTransport, - transports.CloudMemcacheGrpcAsyncIOTransport, - ], -) -def test_cloud_memcache_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudMemcacheGrpcTransport, - transports.CloudMemcacheGrpcAsyncIOTransport, - transports.CloudMemcacheRestTransport, - ], -) -def test_cloud_memcache_transport_auth_gdch_credentials(transport_class): - host = "https://language.com" - api_audience_tests = [None, "https://language2.com"] - api_audience_expect = [host, "https://language2.com"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.CloudMemcacheGrpcTransport, grpc_helpers), - (transports.CloudMemcacheGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -def test_cloud_memcache_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "memcache.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=["1", "2"], - default_host="memcache.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudMemcacheGrpcTransport, - transports.CloudMemcacheGrpcAsyncIOTransport, - ], -) -def test_cloud_memcache_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) - - -def test_cloud_memcache_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ) as mock_configure_mtls_channel: - transports.CloudMemcacheRestTransport( - credentials=cred, client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -def test_cloud_memcache_rest_lro_client(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. 
- assert transport.operations_client is transport.operations_client - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_cloud_memcache_host_no_port(transport_name): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="memcache.googleapis.com" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "memcache.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://memcache.googleapis.com" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_cloud_memcache_host_with_port(transport_name): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="memcache.googleapis.com:8000" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "memcache.googleapis.com:8000" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://memcache.googleapis.com:8000" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "rest", - ], -) -def test_cloud_memcache_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = CloudMemcacheClient( - credentials=creds1, - transport=transport_name, - ) - client2 = CloudMemcacheClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.list_instances._session - session2 = client2.transport.list_instances._session - assert session1 != session2 - session1 = client1.transport.get_instance._session - session2 = client2.transport.get_instance._session - assert session1 != session2 - session1 = client1.transport.create_instance._session - session2 = client2.transport.create_instance._session - assert session1 != session2 - session1 = 
client1.transport.update_instance._session - session2 = client2.transport.update_instance._session - assert session1 != session2 - session1 = client1.transport.update_parameters._session - session2 = client2.transport.update_parameters._session - assert session1 != session2 - session1 = client1.transport.delete_instance._session - session2 = client2.transport.delete_instance._session - assert session1 != session2 - session1 = client1.transport.apply_parameters._session - session2 = client2.transport.apply_parameters._session - assert session1 != session2 - session1 = client1.transport.apply_software_update._session - session2 = client2.transport.apply_software_update._session - assert session1 != session2 - session1 = client1.transport.reschedule_maintenance._session - session2 = client2.transport.reschedule_maintenance._session - assert session1 != session2 - - -def test_cloud_memcache_grpc_transport_channel(): - channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.CloudMemcacheGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_cloud_memcache_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.CloudMemcacheGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudMemcacheGrpcTransport, - transports.CloudMemcacheGrpcAsyncIOTransport, - ], -) -def test_cloud_memcache_transport_channel_mtls_with_client_cert_source(transport_class): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudMemcacheGrpcTransport, - transports.CloudMemcacheGrpcAsyncIOTransport, - ], -) -def test_cloud_memcache_transport_channel_mtls_with_adc(transport_class): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_cloud_memcache_grpc_lro_client(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_cloud_memcache_grpc_lro_async_client(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - transport = client.transport - - # Ensure that we have a api-core operations client. 
- assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_instance_path(): - project = "squid" - location = "clam" - instance = "whelk" - expected = "projects/{project}/locations/{location}/instances/{instance}".format( - project=project, - location=location, - instance=instance, - ) - actual = CloudMemcacheClient.instance_path(project, location, instance) - assert expected == actual - - -def test_parse_instance_path(): - expected = { - "project": "octopus", - "location": "oyster", - "instance": "nudibranch", - } - path = CloudMemcacheClient.instance_path(**expected) - - # Check that the path construction is reversible. - actual = CloudMemcacheClient.parse_instance_path(path) - assert expected == actual - - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = CloudMemcacheClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = CloudMemcacheClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = CloudMemcacheClient.parse_common_billing_account_path(path) - assert expected == actual - - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = CloudMemcacheClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = CloudMemcacheClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = CloudMemcacheClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = CloudMemcacheClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = CloudMemcacheClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = CloudMemcacheClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format( - project=project, - ) - actual = CloudMemcacheClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = CloudMemcacheClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = CloudMemcacheClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = CloudMemcacheClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = CloudMemcacheClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = CloudMemcacheClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.CloudMemcacheTransport, "_prep_wrapped_messages" - ) as prep: - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.CloudMemcacheTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = CloudMemcacheClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_location_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.GetLocationRequest -): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_location(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.GetLocationRequest, - dict, - ], -) -def test_get_location_rest(request_type): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.ListLocationsRequest -): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({"name": "projects/sample1"}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_locations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.ListLocationsRequest, - dict, - ], -) -def test_list_locations_rest(request_type): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_cancel_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.CancelOperationRequest -): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest -): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.DeleteOperationRequest, - dict, - ], -) -def test_delete_operation_rest(request_type): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.ListOperationsRequest -): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.ListOperationsRequest, - dict, - ], -) -def test_list_operations_rest(request_type): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -def test_delete_operation(transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc"): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_field_headers(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -def test_delete_operation_from_dict(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc"): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_cancel_operation_field_headers(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -def test_cancel_operation_from_dict(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_get_operation_field_headers(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -def test_get_operation_from_dict(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) - - -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc"): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -def test_list_operations_field_headers(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -def test_list_operations_from_dict(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_locations(transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc"): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_list_locations_field_headers(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -def test_list_locations_from_dict(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_location(transport: str = "grpc"): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_get_location_field_headers(): - client = CloudMemcacheClient(credentials=ga_credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() - - client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = CloudMemcacheAsyncClient(credentials=ga_credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] - - -def test_get_location_from_dict(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = locations_pb2.Location() - - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = CloudMemcacheAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - with mock.patch.object( - type(getattr(client.transport, close_name)), "close" - ) as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - "rest", - "grpc", - ] - for transport in transports: - client = CloudMemcacheClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - - -@pytest.mark.parametrize( - "client_class,transport_class", - [ - (CloudMemcacheClient, transports.CloudMemcacheGrpcTransport), - (CloudMemcacheAsyncClient, transports.CloudMemcacheGrpcAsyncIOTransport), - ], -) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - )